json-schema-compatibility-checker 1.0.5 → 1.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{condition-resolver.d.ts → cjs/condition-resolver.d.ts} +2 -2
- package/dist/cjs/condition-resolver.js +2 -0
- package/dist/cjs/condition-resolver.js.map +1 -0
- package/dist/cjs/format-validator.js +2 -0
- package/dist/cjs/format-validator.js.map +1 -0
- package/dist/{formatter.d.ts → cjs/formatter.d.ts} +1 -1
- package/dist/cjs/formatter.js +2 -0
- package/dist/cjs/formatter.js.map +1 -0
- package/dist/cjs/index.d.ts +5 -0
- package/dist/cjs/index.js +2 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/{json-schema-compatibility-checker.d.ts → cjs/json-schema-compatibility-checker.d.ts} +7 -7
- package/dist/cjs/json-schema-compatibility-checker.js +2 -0
- package/dist/cjs/json-schema-compatibility-checker.js.map +1 -0
- package/dist/cjs/merge-engine.js +2 -0
- package/dist/cjs/merge-engine.js.map +1 -0
- package/dist/cjs/normalizer.js +2 -0
- package/dist/cjs/normalizer.js.map +1 -0
- package/dist/cjs/package.json +3 -0
- package/dist/cjs/pattern-subset.js +2 -0
- package/dist/cjs/pattern-subset.js.map +1 -0
- package/dist/{semantic-errors.d.ts → cjs/semantic-errors.d.ts} +1 -1
- package/dist/cjs/semantic-errors.js +2 -0
- package/dist/cjs/semantic-errors.js.map +1 -0
- package/dist/{subset-checker.d.ts → cjs/subset-checker.d.ts} +2 -2
- package/dist/cjs/subset-checker.js +2 -0
- package/dist/cjs/subset-checker.js.map +1 -0
- package/dist/cjs/types.js +2 -0
- package/dist/cjs/types.js.map +1 -0
- package/dist/cjs/utils.js +2 -0
- package/dist/cjs/utils.js.map +1 -0
- package/dist/esm/condition-resolver.d.ts +26 -0
- package/dist/esm/condition-resolver.js +2 -0
- package/dist/esm/condition-resolver.js.map +1 -0
- package/dist/esm/format-validator.d.ts +78 -0
- package/dist/esm/format-validator.js +2 -0
- package/dist/esm/format-validator.js.map +1 -0
- package/dist/esm/formatter.d.ts +22 -0
- package/dist/esm/formatter.js +2 -0
- package/dist/{chunk-7mkqk5qv.js.map → esm/formatter.js.map} +1 -10
- package/dist/esm/index.d.ts +5 -0
- package/dist/esm/index.js +2 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/json-schema-compatibility-checker.d.ts +73 -0
- package/dist/esm/json-schema-compatibility-checker.js +2 -0
- package/dist/esm/json-schema-compatibility-checker.js.map +1 -0
- package/dist/esm/merge-engine.d.ts +30 -0
- package/dist/esm/merge-engine.js +2 -0
- package/dist/esm/merge-engine.js.map +1 -0
- package/dist/esm/normalizer.d.ts +24 -0
- package/dist/esm/normalizer.js +2 -0
- package/dist/esm/normalizer.js.map +1 -0
- package/dist/esm/pattern-subset.d.ts +59 -0
- package/dist/esm/pattern-subset.js +2 -0
- package/dist/esm/pattern-subset.js.map +1 -0
- package/dist/esm/semantic-errors.d.ts +24 -0
- package/dist/esm/semantic-errors.js +2 -0
- package/dist/esm/semantic-errors.js.map +1 -0
- package/dist/esm/subset-checker.d.ts +81 -0
- package/dist/esm/subset-checker.js +2 -0
- package/dist/esm/subset-checker.js.map +1 -0
- package/dist/esm/types.d.ts +29 -0
- package/dist/esm/types.js +2 -0
- package/dist/esm/types.js.map +1 -0
- package/dist/esm/utils.d.ts +43 -0
- package/dist/esm/utils.js +2 -0
- package/dist/esm/utils.js.map +1 -0
- package/package.json +26 -11
- package/dist/chunk-159ezrfm.js +0 -5
- package/dist/chunk-159ezrfm.js.map +0 -10
- package/dist/chunk-1xda2xvb.js +0 -5
- package/dist/chunk-1xda2xvb.js.map +0 -10
- package/dist/chunk-3gazezx2.js +0 -5
- package/dist/chunk-3gazezx2.js.map +0 -10
- package/dist/chunk-7mkqk5qv.js +0 -6
- package/dist/chunk-aemw3jv0.js +0 -5
- package/dist/chunk-aemw3jv0.js.map +0 -10
- package/dist/chunk-hrwygqa2.js +0 -5
- package/dist/chunk-hrwygqa2.js.map +0 -10
- package/dist/chunk-jg89j4nd.js +0 -5
- package/dist/chunk-jg89j4nd.js.map +0 -10
- package/dist/chunk-kncywgnx.js +0 -5
- package/dist/chunk-kncywgnx.js.map +0 -10
- package/dist/chunk-nkpsq34q.js +0 -5
- package/dist/chunk-nkpsq34q.js.map +0 -10
- package/dist/chunk-nn3cjjtp.js +0 -5
- package/dist/chunk-nn3cjjtp.js.map +0 -10
- package/dist/condition-resolver.js +0 -4
- package/dist/condition-resolver.js.map +0 -9
- package/dist/format-validator.js +0 -4
- package/dist/format-validator.js.map +0 -9
- package/dist/formatter.js +0 -4
- package/dist/formatter.js.map +0 -9
- package/dist/index.d.ts +0 -5
- package/dist/index.js +0 -4
- package/dist/index.js.map +0 -9
- package/dist/json-schema-compatibility-checker.js +0 -4
- package/dist/json-schema-compatibility-checker.js.map +0 -9
- package/dist/merge-engine.js +0 -4
- package/dist/merge-engine.js.map +0 -9
- package/dist/normalizer.js +0 -4
- package/dist/normalizer.js.map +0 -9
- package/dist/pattern-subset.js +0 -4
- package/dist/pattern-subset.js.map +0 -9
- package/dist/semantic-errors.js +0 -4
- package/dist/semantic-errors.js.map +0 -9
- package/dist/subset-checker.js +0 -4
- package/dist/subset-checker.js.map +0 -9
- package/dist/types.js +0 -3
- package/dist/types.js.map +0 -9
- package/dist/utils.js +0 -4
- package/dist/utils.js.map +0 -9
- /package/dist/{format-validator.d.ts → cjs/format-validator.d.ts} +0 -0
- /package/dist/{merge-engine.d.ts → cjs/merge-engine.d.ts} +0 -0
- /package/dist/{normalizer.d.ts → cjs/normalizer.d.ts} +0 -0
- /package/dist/{pattern-subset.d.ts → cjs/pattern-subset.d.ts} +0 -0
- /package/dist/{types.d.ts → cjs/types.d.ts} +0 -0
- /package/dist/{utils.d.ts → cjs/utils.d.ts} +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import type { JSONSchema7 } from "json-schema";
|
|
2
|
-
import type { MergeEngine } from "./merge-engine";
|
|
3
|
-
import type { ResolvedConditionResult } from "./types";
|
|
2
|
+
import type { MergeEngine } from "./merge-engine.js";
|
|
3
|
+
import type { ResolvedConditionResult } from "./types.js";
|
|
4
4
|
/**
|
|
5
5
|
* Résout les `if/then/else` d'un schema en évaluant le `if` contre
|
|
6
6
|
* des données partielles (discriminants).
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";

// CJS build of condition-resolver.
//
// Resolves `if/then/else` conditionals in a JSON Schema by evaluating the `if`
// clause against partial data (discriminants), merging the applicable
// `then`/`else` branch into the base schema, stripping the conditional
// keywords, and recursing into `properties` for nested conditionals.
Object.defineProperty(exports, "__esModule", { value: true });
Object.defineProperty(exports, "resolveConditions", {
  enumerable: true,
  get: function () {
    return resolveConditions;
  },
});

const { validateFormat } = require("./format-validator.js");
const { inferType } = require("./normalizer.js");
const { deepEqual, hasOwn, isPlainObj, omitKeys, unionStrings } = require("./utils.js");

// Keywords handled by dedicated logic in mergeBranchInto; the generic
// per-keyword merge loop must skip them.
const SPECIAL_MERGE_KEYS = new Set(["required", "properties", "dependencies"]);

// Keywords whose value is a single sub-schema, mergeable via engine.merge.
const SUB_SCHEMA_KEYS = new Set([
  "additionalProperties",
  "items",
  "contains",
  "propertyNames",
  "not",
]);

// Numeric "lower bound" keywords: keep the max of both sides (more restrictive).
const MIN_KEYS = new Set([
  "minimum",
  "exclusiveMinimum",
  "minLength",
  "minItems",
  "minProperties",
]);

// Numeric "upper bound" keywords: keep the min of both sides (more restrictive).
const MAX_KEYS = new Set([
  "maximum",
  "exclusiveMaximum",
  "maxLength",
  "maxItems",
  "maxProperties",
]);

/**
 * Returns true when `value` satisfies a JSON Schema `type` constraint.
 * An integer value also satisfies `type: "number"` (Draft-07 semantics).
 */
function matchesType(value, type) {
  if (type === undefined) return true;
  const types = Array.isArray(type) ? type : [type];
  const actualType = inferType(value);
  return types.some(
    (t) => t === actualType || (t === "number" && actualType === "integer"),
  );
}

/**
 * Evaluates numeric constraints (minimum/maximum, exclusive bounds,
 * multipleOf) of `prop` against a number.
 */
function evaluateNumericConstraints(value, prop) {
  if (prop.minimum !== undefined && !(value >= prop.minimum)) return false;
  if (prop.maximum !== undefined && !(value <= prop.maximum)) return false;
  if (prop.exclusiveMinimum !== undefined && !(value > prop.exclusiveMinimum)) return false;
  if (prop.exclusiveMaximum !== undefined && !(value < prop.exclusiveMaximum)) return false;
  if (prop.multipleOf !== undefined && value % prop.multipleOf !== 0) return false;
  return true;
}

// Cache of compiled RegExp objects keyed by pattern source, so repeated
// evaluations of the same `pattern` keyword don't recompile.
const patternRegexCache = new Map();

/** Returns the cached RegExp for `pattern`, compiling it on first use. */
function getOrCompileRegex(pattern) {
  let regex = patternRegexCache.get(pattern);
  if (regex === undefined) {
    regex = new RegExp(pattern);
    patternRegexCache.set(pattern, regex);
  }
  return regex;
}

/**
 * Evaluates string constraints (minLength/maxLength, pattern) of `prop`
 * against a string.
 */
function evaluateStringConstraints(value, prop) {
  if (prop.minLength !== undefined && !(value.length >= prop.minLength)) return false;
  if (prop.maxLength !== undefined && !(value.length <= prop.maxLength)) return false;
  if (prop.pattern !== undefined && !getOrCompileRegex(prop.pattern).test(value)) return false;
  return true;
}

/**
 * Evaluates array constraints (minItems/maxItems, uniqueItems) of `prop`
 * against an array. uniqueItems uses deepEqual pairwise comparison.
 */
function evaluateArrayConstraints(value, prop) {
  if (prop.minItems !== undefined && !(value.length >= prop.minItems)) return false;
  if (prop.maxItems !== undefined && !(value.length <= prop.maxItems)) return false;
  if (prop.uniqueItems === true) {
    // Pairwise deep comparison; double loop avoids slice allocations.
    const len = value.length;
    for (let i = 0; i < len; i++) {
      for (let j = i + 1; j < len; j++) {
        if (deepEqual(value[i], value[j])) return false;
      }
    }
  }
  return true;
}

/**
 * Evaluates whether partial `data` satisfies an `if` schema.
 *
 * Pragmatic evaluator (not a full validator): checks `properties`
 * (const/enum/type plus numeric, string and array constraints, `format`,
 * and nested objects recursively), `required`, and the combinators
 * `allOf` / `anyOf` / `oneOf` / `not`. Per Draft-07, a property listed in
 * `properties` but absent from the instance is skipped (presence is
 * `required`'s job).
 */
function evaluateCondition(ifSchema, data) {
  if (isPlainObj(ifSchema.properties)) {
    const propsOk = Object.keys(ifSchema.properties).every((key) => {
      const propDef = ifSchema.properties?.[key];
      if (typeof propDef === "boolean") return true;
      const prop = propDef;
      const value = data[key];
      // Absent property → this keyword imposes no constraint on it.
      if (value === undefined) return true;
      if (hasOwn(prop, "const")) {
        if (!deepEqual(value, prop.const)) return false;
      }
      if (hasOwn(prop, "enum")) {
        if (!prop.enum?.some((v) => deepEqual(v, value))) return false;
      }
      if (hasOwn(prop, "type") && value !== undefined) {
        if (!matchesType(value, prop.type)) return false;
      }
      if (typeof value === "number") {
        if (!evaluateNumericConstraints(value, prop)) return false;
      }
      if (typeof value === "string") {
        if (!evaluateStringConstraints(value, prop)) return false;
      }
      if (Array.isArray(value)) {
        if (!evaluateArrayConstraints(value, prop)) return false;
      }
      if (prop.format !== undefined && typeof value === "string") {
        // validateFormat may return a non-boolean for unknown formats;
        // only a strict `false` counts as a mismatch.
        const formatResult = validateFormat(value, prop.format);
        if (formatResult === false) return false;
      }
      if (isPlainObj(prop.properties) || Array.isArray(prop.required)) {
        // Nested object condition → recurse into the sub-object.
        if (isPlainObj(value)) {
          if (!evaluateCondition(prop, value)) return false;
        }
      }
      return true;
    });
    if (!propsOk) return false;
  }
  if (Array.isArray(ifSchema.required)) {
    const allRequired = ifSchema.required.every((key) => hasOwn(data, key));
    if (!allRequired) return false;
  }
  if (Array.isArray(ifSchema.allOf)) {
    const allMatch = ifSchema.allOf.every((entry) => {
      if (typeof entry === "boolean") return entry;
      return evaluateCondition(entry, data);
    });
    if (!allMatch) return false;
  }
  if (Array.isArray(ifSchema.anyOf)) {
    const anyMatch = ifSchema.anyOf.some((entry) => {
      if (typeof entry === "boolean") return entry;
      return evaluateCondition(entry, data);
    });
    if (!anyMatch) return false;
  }
  if (Array.isArray(ifSchema.oneOf)) {
    // Exactly one entry must match; bail out early past the second match.
    let matchCount = 0;
    for (const entry of ifSchema.oneOf) {
      const matches = typeof entry === "boolean" ? entry : evaluateCondition(entry, data);
      if (matches) matchCount++;
      if (matchCount > 1) break;
    }
    if (matchCount !== 1) return false;
  }
  if (hasOwn(ifSchema, "not") && isPlainObj(ifSchema.not) && typeof ifSchema.not !== "boolean") {
    const notResult = evaluateCondition(ifSchema.not, data);
    if (notResult) return false;
  }
  return true;
}

// Keywords whose presence on an `if` property marks it as a discriminant.
const DISCRIMINANT_INDICATORS = [
  "const",
  "enum",
  "minimum",
  "maximum",
  "exclusiveMinimum",
  "exclusiveMaximum",
  "pattern",
  "minLength",
  "maxLength",
  "multipleOf",
  "minItems",
  "maxItems",
  "format",
];

/**
 * Collects into `out` the data values of properties that the `if` schema
 * constrains with a discriminant indicator and that are present in `data`.
 */
function extractDiscriminants(ifSchema, data, out) {
  if (!isPlainObj(ifSchema.properties)) return;
  const props = ifSchema.properties;
  for (const key of Object.keys(props)) {
    const propDef = props[key];
    if (typeof propDef === "boolean") continue;
    const hasIndicator = DISCRIMINANT_INDICATORS.some((indicator) => hasOwn(propDef, indicator));
    if (hasIndicator && hasOwn(data, key)) {
      out[key] = data[key];
    }
  }
}

/**
 * Merges a `then`/`else` branch schema into `resolved` (mutated in place).
 * `required`, `properties` and `dependencies` get dedicated union/merge
 * handling; every other keyword goes through a generic per-keyword merge.
 */
function mergeBranchInto(resolved, branchDef, engine) {
  if (typeof branchDef === "boolean") return;
  const branchSchema = branchDef;

  if (Array.isArray(branchSchema.required)) {
    resolved.required = unionStrings(resolved.required ?? [], branchSchema.required);
  }

  if (isPlainObj(branchSchema.properties)) {
    const branchProps = branchSchema.properties;
    const mergedProps = { ...(resolved.properties ?? {}) };
    for (const key of Object.keys(branchProps)) {
      const branchProp = branchProps[key];
      if (branchProp === undefined) continue;
      const existing = resolved.properties?.[key];
      if (existing !== undefined && typeof existing !== "boolean" && typeof branchProp !== "boolean") {
        // Both sides are object schemas → deep-merge, falling back to the
        // branch's version if the engine cannot merge them.
        const merged = engine.merge(existing, branchProp);
        mergedProps[key] = merged ?? branchProp;
      } else {
        mergedProps[key] = branchProp;
      }
    }
    resolved.properties = mergedProps;
  }

  if (isPlainObj(branchSchema.dependencies)) {
    const resolvedDeps = resolved.dependencies ?? {};
    const branchDeps = branchSchema.dependencies;
    const acc = { ...resolvedDeps };
    for (const depKey of Object.keys(branchDeps)) {
      const branchVal = branchDeps[depKey];
      if (branchVal === undefined) continue;
      const existingVal = acc[depKey];
      if (existingVal === undefined) {
        acc[depKey] = branchVal;
      } else if (Array.isArray(existingVal) && Array.isArray(branchVal)) {
        // Property-dependency arrays → union of names.
        acc[depKey] = unionStrings(existingVal, branchVal);
      } else if (isPlainObj(existingVal) && isPlainObj(branchVal)) {
        // Schema dependencies → engine merge, branch wins on failure.
        const merged = engine.merge(existingVal, branchVal);
        acc[depKey] = merged ?? branchVal;
      } else {
        acc[depKey] = branchVal;
      }
    }
    resolved.dependencies = acc;
  }

  // Generic per-keyword merge.
  // BUG FIX: the original body used `return` for every per-key outcome —
  // a forEach-callback idiom left inside a `for...of` — so the function
  // exited after the FIRST key (even a skipped SPECIAL_MERGE_KEYS one),
  // silently dropping the rest of the branch. `continue` restores the
  // intended "skip to next keyword" behavior.
  for (const key of Object.keys(branchSchema)) {
    if (SPECIAL_MERGE_KEYS.has(key)) continue; // handled above
    const branchVal = branchSchema[key];
    const resolvedVal = resolved[key];
    if (resolvedVal === undefined) {
      resolved[key] = branchVal;
      continue;
    }
    if (deepEqual(resolvedVal, branchVal)) continue;
    if (SUB_SCHEMA_KEYS.has(key)) {
      const merged = engine.merge(resolvedVal, branchVal);
      resolved[key] = merged !== null ? merged : branchVal;
      continue;
    }
    if (MIN_KEYS.has(key)) {
      // Lower bounds → the larger value is the more restrictive one.
      resolved[key] =
        typeof resolvedVal === "number" && typeof branchVal === "number"
          ? Math.max(resolvedVal, branchVal)
          : branchVal;
      continue;
    }
    if (MAX_KEYS.has(key)) {
      // Upper bounds → the smaller value is the more restrictive one.
      resolved[key] =
        typeof resolvedVal === "number" && typeof branchVal === "number"
          ? Math.min(resolvedVal, branchVal)
          : branchVal;
      continue;
    }
    if (key === "uniqueItems") {
      resolved[key] = resolvedVal === true || branchVal === true;
      continue;
    }
    if (key === "pattern" || key === "format") {
      // Branch overrides: these keywords are not meaningfully mergeable.
      resolved[key] = branchVal;
      continue;
    }
    // Fallback: let the engine merge the single-keyword wrappers; keep the
    // branch value if the merge fails or drops the key.
    const base = { [key]: resolvedVal };
    const branch = { [key]: branchVal };
    const merged = engine.merge(base, branch);
    if (merged && typeof merged !== "boolean" && hasOwn(merged, key)) {
      resolved[key] = merged[key];
    } else {
      resolved[key] = branchVal;
    }
  }
}

/**
 * Resolves all `if/then/else` conditionals of `schema` against partial
 * `data`, including conditionals nested in `allOf` entries and in
 * `properties` sub-schemas.
 *
 * @returns {{resolved: object, branch: "then"|"else"|null, discriminant: object}}
 *   The resolved schema (conditionals merged and removed), which top-level
 *   branch applied (if any), and the discriminant values that drove the
 *   decision (nested keys are flattened as "prop.subprop").
 */
function resolveConditions(schema, data, engine) {
  let branch = null;
  const discriminant = {};
  const hasTopLevelIf = schema.if !== undefined;
  const hasAllOfConditions =
    Array.isArray(schema.allOf) &&
    schema.allOf.some((e) => typeof e !== "boolean" && hasOwn(e, "if"));
  if (!hasTopLevelIf && !hasAllOfConditions) {
    // No conditionals at this level — only recurse into properties.
    const resolved = resolveNestedProperties(schema, data, engine, discriminant);
    return { resolved, branch, discriminant };
  }
  let resolved = { ...schema };
  if (hasAllOfConditions) {
    resolved = resolveAllOfConditions(resolved, data, engine, discriminant);
  }
  if (resolved.if !== undefined) {
    const ifSchema = resolved.if;
    const matches = evaluateCondition(ifSchema, data);
    extractDiscriminants(ifSchema, data, discriminant);
    const applicableBranch = matches ? resolved.then : resolved.else;
    branch = matches ? "then" : "else";
    if (applicableBranch) {
      mergeBranchInto(resolved, applicableBranch, engine);
    }
    delete resolved.if;
    delete resolved.then;
    delete resolved.else;
  }
  resolved = resolveNestedProperties(resolved, data, engine, discriminant);
  return { resolved, branch, discriminant };
}

/**
 * Resolves `if/then/else` conditionals carried by `allOf` entries, merging
 * each applicable branch into `resolved` and keeping whatever remains of
 * each entry once the conditional keywords are stripped.
 */
function resolveAllOfConditions(resolved, data, engine, discriminant) {
  if (!Array.isArray(resolved.allOf)) return resolved;
  const remainingAllOf = [];
  for (const entry of resolved.allOf) {
    if (typeof entry === "boolean") {
      remainingAllOf.push(entry);
      continue;
    }
    const subSchema = entry;
    if (subSchema.if === undefined) {
      remainingAllOf.push(entry);
      continue;
    }
    const ifSchema = subSchema.if;
    const matches = evaluateCondition(ifSchema, data);
    extractDiscriminants(ifSchema, data, discriminant);
    const applicableBranch = matches ? subSchema.then : subSchema.else;
    if (applicableBranch) {
      mergeBranchInto(resolved, applicableBranch, engine);
    }
    const remaining = omitKeys(subSchema, ["if", "then", "else"]);
    if (Object.keys(remaining).length > 0) {
      remainingAllOf.push(remaining);
    }
  }
  resolved = { ...resolved };
  if (remainingAllOf.length === 0) {
    delete resolved.allOf;
  } else {
    resolved.allOf = remainingAllOf;
  }
  return resolved;
}

/**
 * Recurses into `properties` sub-schemas that carry conditionals and
 * resolves them against the corresponding slice of `data`. Nested
 * discriminants are recorded into `discriminant` under "key.subkey".
 * Returns the input object unchanged when no property needed resolving.
 */
function resolveNestedProperties(resolved, data, engine, discriminant) {
  if (!isPlainObj(resolved.properties)) return resolved;
  const props = resolved.properties;
  const propKeys = Object.keys(props);
  let changed = false;
  const resolvedProps = {};
  for (const key of propKeys) {
    const propDef = props[key];
    if (propDef === undefined) continue;
    if (typeof propDef === "boolean") {
      resolvedProps[key] = propDef;
      continue;
    }
    const propSchema = propDef;
    const hasConditions =
      propSchema.if !== undefined ||
      (Array.isArray(propSchema.allOf) &&
        propSchema.allOf.some((e) => typeof e !== "boolean" && hasOwn(e, "if")));
    if (!hasConditions) {
      resolvedProps[key] = propDef;
      continue;
    }
    const nestedData = isPlainObj(data[key]) ? data[key] : {};
    const nested = resolveConditions(propSchema, nestedData, engine);
    for (const dk of Object.keys(nested.discriminant)) {
      discriminant[`${key}.${dk}`] = nested.discriminant[dk];
    }
    resolvedProps[key] = nested.resolved;
    changed = true;
  }
  return changed ? { ...resolved, properties: resolvedProps } : resolved;
}
|
|
2
|
+
//# sourceMappingURL=condition-resolver.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/condition-resolver.ts"],"sourcesContent":["import type { JSONSchema7, JSONSchema7Definition } from \"json-schema\";\nimport { validateFormat } from \"./format-validator\";\nimport type { MergeEngine } from \"./merge-engine\";\nimport { inferType } from \"./normalizer\";\nimport type { ResolvedConditionResult } from \"./types\";\nimport { deepEqual, hasOwn, isPlainObj, omitKeys, unionStrings } from \"./utils\";\n\n// ─── Condition Resolver ──────────────────────────────────────────────────────\n//\n// Résout les `if/then/else` d'un schema en évaluant le `if` contre\n// des données partielles (discriminants).\n//\n// Stratégie :\n// 1. Évaluer si les données partielles satisfont le `if`\n// 2. Merger la branche applicable (`then` ou `else`) dans le schema de base\n// 3. Supprimer les mots-clés `if/then/else` du résultat\n// 4. Récurser dans les `properties` pour résoudre les conditions imbriquées\n//\n// L'évaluation du `if` (via `evaluateCondition`) gère :\n// - `properties` avec `const`, `enum`, `type`, contraintes numériques/string/array\n// - `required` (vérification de présence des clés)\n// - `allOf` (toutes les entrées doivent matcher — récursion) [2.1]\n// - `anyOf` (au moins une entrée doit matcher — récursion) [2.2]\n// - `oneOf` (exactement une entrée doit matcher — récursion) [2.3]\n// - `not` (inversion du résultat — récursion) [2.4]\n// - Propriétés imbriquées (nested objects — récursion) [2.5]\n// - `format` via `validateFormat` de `format-validator.ts` [2.6]\n//\n// Utilise lodash massivement :\n// - `_.has` / `_.get` pour l'accès sûr aux propriétés\n// - `_.every` / `_.some` pour les prédicats sur les collections\n// - `_.union` / `_.uniq` pour la fusion de tableaux (required, deps)\n// - `_.isArray` / `_.isPlainObject` pour le typage des valeurs\n// - `_.mapValues` pour transformer les propriétés\n// - `_.omit` / `_.pick` pour sélectionner/exclure des clés\n// - `_.keys` / `_.forEach` pour l'itération\n// - 
`_.reduce` pour accumuler les résultats\n// - `_.isEqual` pour la comparaison profonde\n// - `_.size` / `_.filter` pour le comptage (oneOf)\n\n// ─── Keywords classification ─────────────────────────────────────────────────\n\n/** Mots-clés qui ne doivent pas être traités par la boucle générique de mergeBranchInto */\nconst SPECIAL_MERGE_KEYS = new Set([\"required\", \"properties\", \"dependencies\"]);\n\n/** Mots-clés contenant un sous-schema unique (mergeable via engine.merge) */\nconst SUB_SCHEMA_KEYS = new Set([\n\t\"additionalProperties\",\n\t\"items\",\n\t\"contains\",\n\t\"propertyNames\",\n\t\"not\",\n]);\n\n/** Mots-clés numériques de type \"minimum\" (prendre le max pour être plus restrictif) */\nconst MIN_KEYS = new Set([\n\t\"minimum\",\n\t\"exclusiveMinimum\",\n\t\"minLength\",\n\t\"minItems\",\n\t\"minProperties\",\n]);\n\n/** Mots-clés numériques de type \"maximum\" (prendre le min pour être plus restrictif) */\nconst MAX_KEYS = new Set([\n\t\"maximum\",\n\t\"exclusiveMaximum\",\n\t\"maxLength\",\n\t\"maxItems\",\n\t\"maxProperties\",\n]);\n\n// ─── Condition evaluation (internal) ─────────────────────────────────────────\n\n/**\n * Vérifie si `value` correspond à un type JSON Schema.\n */\nfunction matchesType(value: unknown, type: JSONSchema7[\"type\"]): boolean {\n\tif (type === undefined) return true;\n\n\tconst types = Array.isArray(type) ? 
type : [type];\n\tconst actualType = inferType(value);\n\n\treturn types.some(\n\t\t(t) => t === actualType || (t === \"number\" && actualType === \"integer\"),\n\t);\n}\n\n/**\n * Évalue une contrainte numérique sur une valeur.\n * Point 5 — Enrichissement de evaluateCondition.\n */\nfunction evaluateNumericConstraints(value: number, prop: JSONSchema7): boolean {\n\tif (prop.minimum !== undefined && !(value >= prop.minimum)) return false;\n\tif (prop.maximum !== undefined && !(value <= prop.maximum)) return false;\n\tif (\n\t\tprop.exclusiveMinimum !== undefined &&\n\t\t!(value > (prop.exclusiveMinimum as number))\n\t)\n\t\treturn false;\n\tif (\n\t\tprop.exclusiveMaximum !== undefined &&\n\t\t!(value < (prop.exclusiveMaximum as number))\n\t)\n\t\treturn false;\n\tif (prop.multipleOf !== undefined && value % prop.multipleOf !== 0)\n\t\treturn false;\n\treturn true;\n}\n\n/**\n * Évalue une contrainte string sur une valeur.\n * Point 5 — Enrichissement de evaluateCondition.\n */\n/** Cache for compiled RegExp patterns used in evaluateStringConstraints */\nconst patternRegexCache = new Map<string, RegExp>();\n\nfunction getOrCompileRegex(pattern: string): RegExp {\n\tlet regex = patternRegexCache.get(pattern);\n\tif (regex === undefined) {\n\t\tregex = new RegExp(pattern);\n\t\tpatternRegexCache.set(pattern, regex);\n\t}\n\treturn regex;\n}\n\nfunction evaluateStringConstraints(value: string, prop: JSONSchema7): boolean {\n\tif (prop.minLength !== undefined && !(value.length >= prop.minLength))\n\t\treturn false;\n\tif (prop.maxLength !== undefined && !(value.length <= prop.maxLength))\n\t\treturn false;\n\tif (\n\t\tprop.pattern !== undefined &&\n\t\t!getOrCompileRegex(prop.pattern).test(value)\n\t)\n\t\treturn false;\n\treturn true;\n}\n\n/**\n * Évalue une contrainte array sur une valeur.\n * Point 5 — Enrichissement de evaluateCondition.\n */\nfunction evaluateArrayConstraints(\n\tvalue: unknown[],\n\tprop: JSONSchema7,\n): boolean {\n\tif (prop.minItems !== 
undefined && !(value.length >= prop.minItems))\n\t\treturn false;\n\tif (prop.maxItems !== undefined && !(value.length <= prop.maxItems))\n\t\treturn false;\n\tif (prop.uniqueItems === true) {\n\t\t// Vérifier l'unicité via deepEqual pour les éléments non-primitifs\n\t\t// Optimisation : double boucle sans slice pour éviter les allocations\n\t\tconst len = value.length;\n\t\tfor (let i = 0; i < len; i++) {\n\t\t\tfor (let j = i + 1; j < len; j++) {\n\t\t\t\tif (deepEqual(value[i], value[j])) return false;\n\t\t\t}\n\t\t}\n\t}\n\treturn true;\n}\n\n/**\n * Évalue si des données partielles satisfont un `if` schema.\n *\n * Stratégie pragmatique (pas un validateur complet) :\n * - Vérifie les `properties` avec `const`, `enum`, `type`\n * - Point 5 : Vérifie aussi minimum/maximum, minLength/maxLength,\n * pattern, multipleOf, minItems/maxItems, uniqueItems\n * - Vérifie les `required`\n * - 2.1 : `allOf` → toutes les entrées doivent matcher (récursion)\n * - 2.2 : `anyOf` → au moins une entrée doit matcher (récursion)\n * - 2.3 : `oneOf` → exactement une entrée doit matcher (récursion)\n * - 2.4 : `not` → inversion du résultat (récursion)\n * - 2.5 : Propriétés imbriquées → récursion sur les sous-objets\n * - 2.6 : `format` → validation via `validateFormat`\n *\n * Utilise `_.forEach` / `_.every` / `_.has` pour une itération idiomatique.\n */\nfunction evaluateCondition(\n\tifSchema: JSONSchema7,\n\tdata: Record<string, unknown>,\n): boolean {\n\tif (isPlainObj(ifSchema.properties)) {\n\t\tconst propsOk = Object.keys(ifSchema.properties).every((key) => {\n\t\t\tconst propDef = ifSchema.properties?.[key];\n\t\t\tif (typeof propDef === \"boolean\") return true;\n\t\t\tconst prop = propDef as JSONSchema7;\n\t\t\tconst value = data[key];\n\n\t\t\t// ── Propriété absente → skip ──\n\t\t\t// Selon la spec JSON Schema Draft-07, le keyword `properties` ne valide\n\t\t\t// une propriété que si elle est **présente** dans l'instance.\n\t\t\t// C'est le keyword `required` qui gère 
la présence obligatoire.\n\t\t\tif (value === undefined) return true;\n\n\t\t\t// ── const ──\n\t\t\tif (hasOwn(prop, \"const\")) {\n\t\t\t\tif (!deepEqual(value, prop.const)) return false;\n\t\t\t}\n\n\t\t\t// ── enum ──\n\t\t\tif (hasOwn(prop, \"enum\")) {\n\t\t\t\tif (!prop.enum?.some((v) => deepEqual(v, value))) return false;\n\t\t\t}\n\n\t\t\t// ── type ──\n\t\t\tif (hasOwn(prop, \"type\") && value !== undefined) {\n\t\t\t\tif (!matchesType(value, prop.type)) return false;\n\t\t\t}\n\n\t\t\t// ── Point 5 : Contraintes numériques/string/array ──\n\t\t\t// Quand `value` est `undefined`, aucun de ces blocs ne s'exécute\n\t\t\t// (`typeof undefined` vaut `\"undefined\"`, pas `\"number\"` ni `\"string\"`,\n\t\t\t// et `isArray(undefined)` retourne `false`).\n\t\t\t// C'est le comportement voulu : on ne peut pas évaluer une contrainte\n\t\t\t// sur une donnée absente → on skip, cohérent avec la logique pragmatique.\n\t\t\tif (typeof value === \"number\") {\n\t\t\t\tif (!evaluateNumericConstraints(value, prop)) return false;\n\t\t\t}\n\n\t\t\tif (typeof value === \"string\") {\n\t\t\t\tif (!evaluateStringConstraints(value, prop)) return false;\n\t\t\t}\n\n\t\t\tif (Array.isArray(value)) {\n\t\t\t\tif (!evaluateArrayConstraints(value as unknown[], prop)) return false;\n\t\t\t}\n\n\t\t\t// ── 2.6 — format ──\n\t\t\t// Valide la valeur contre le format via class-validator.\n\t\t\t// Le format ne s'applique qu'aux strings en Draft-07.\n\t\t\t// Si le format est inconnu → skip (retourne null → on continue).\n\t\t\tif (prop.format !== undefined && typeof value === \"string\") {\n\t\t\t\tconst formatResult = validateFormat(value, prop.format);\n\t\t\t\tif (formatResult === false) return false;\n\t\t\t\t// null (format inconnu) → skip, cohérent avec l'approche pragmatique\n\t\t\t}\n\n\t\t\t// ── 2.5 — Propriétés imbriquées (nested objects) ──\n\t\t\t// Si la propriété elle-même a des `properties` ou un `required`,\n\t\t\t// et que la valeur dans data est un objet, récurser 
dans evaluateCondition\n\t\t\t// en passant la sous-donnée comme nouveau `data`.\n\t\t\t// Si data[key] n'est pas un objet, on skip (retourne true pour cette prop,\n\t\t\t// cohérent avec \"absence = pas de contrainte\").\n\t\t\tif (isPlainObj(prop.properties) || Array.isArray(prop.required)) {\n\t\t\t\tif (isPlainObj(value)) {\n\t\t\t\t\tif (!evaluateCondition(prop, value as Record<string, unknown>)) {\n\t\t\t\t\t\treturn false;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// value n'est pas un objet → skip, on ne peut pas évaluer les sous-props\n\t\t\t}\n\n\t\t\treturn true;\n\t\t});\n\t\tif (!propsOk) return false;\n\t}\n\n\t// ── required ──\n\tif (Array.isArray(ifSchema.required)) {\n\t\tconst allRequired = ifSchema.required.every((key) =>\n\t\t\thasOwn(data, key as string),\n\t\t);\n\t\tif (!allRequired) return false;\n\t}\n\n\t// ── 2.1 — allOf ──\n\t// Toutes les entrées du allOf doivent matcher (évaluation récursive).\n\tif (Array.isArray(ifSchema.allOf)) {\n\t\tconst allMatch = ifSchema.allOf.every((entry) => {\n\t\t\tif (typeof entry === \"boolean\") return entry;\n\t\t\treturn evaluateCondition(entry as JSONSchema7, data);\n\t\t});\n\t\tif (!allMatch) return false;\n\t}\n\n\t// ── 2.2 — anyOf ──\n\t// Au moins une entrée du anyOf doit matcher (évaluation récursive).\n\tif (Array.isArray(ifSchema.anyOf)) {\n\t\tconst anyMatch = ifSchema.anyOf.some((entry) => {\n\t\t\tif (typeof entry === \"boolean\") return entry;\n\t\t\treturn evaluateCondition(entry as JSONSchema7, data);\n\t\t});\n\t\tif (!anyMatch) return false;\n\t}\n\n\t// ── 2.3 — oneOf ──\n\t// Exactement une entrée du oneOf doit matcher (évaluation récursive).\n\tif (Array.isArray(ifSchema.oneOf)) {\n\t\tlet matchCount = 0;\n\t\tfor (const entry of ifSchema.oneOf) {\n\t\t\tconst matches =\n\t\t\t\ttypeof entry === \"boolean\"\n\t\t\t\t\t? 
entry\n\t\t\t\t\t: evaluateCondition(entry as JSONSchema7, data);\n\t\t\tif (matches) matchCount++;\n\t\t\tif (matchCount > 1) break;\n\t\t}\n\t\tif (matchCount !== 1) return false;\n\t}\n\n\t// ── 2.4 — not ──\n\t// Inverser le résultat de l'évaluation du contenu du `not`.\n\tif (\n\t\thasOwn(ifSchema, \"not\") &&\n\t\tisPlainObj(ifSchema.not) &&\n\t\ttypeof ifSchema.not !== \"boolean\"\n\t) {\n\t\tconst notResult = evaluateCondition(ifSchema.not as JSONSchema7, data);\n\t\tif (notResult) return false; // Le not matche → la condition not ne matche pas\n\t}\n\n\treturn true;\n}\n\n// ─── Discriminant extraction ─────────────────────────────────────────────────\n\n/**\n * Mots-clés qui indiquent qu'une propriété est un discriminant\n * (sa valeur dans les données est utilisée pour la résolution).\n *\n * Point 5 — Étendu avec les contraintes numériques/string/pattern.\n */\nconst DISCRIMINANT_INDICATORS = [\n\t\"const\",\n\t\"enum\",\n\t\"minimum\",\n\t\"maximum\",\n\t\"exclusiveMinimum\",\n\t\"exclusiveMaximum\",\n\t\"pattern\",\n\t\"minLength\",\n\t\"maxLength\",\n\t\"multipleOf\",\n\t\"minItems\",\n\t\"maxItems\",\n\t\"format\",\n] as const;\n\n/**\n * Extrait les valeurs discriminantes utilisées dans un `if` schema\n * depuis les données partielles.\n *\n * Point 5 — Collecte aussi les discriminants pour les nouvelles contraintes\n * (minimum, maximum, pattern, etc.).\n *\n * Utilise `_.some` pour vérifier qu'au moins un indicateur est présent,\n * et `_.has` pour un accès sûr.\n */\nfunction extractDiscriminants(\n\tifSchema: JSONSchema7,\n\tdata: Record<string, unknown>,\n\tout: Record<string, unknown>,\n): void {\n\tif (!isPlainObj(ifSchema.properties)) return;\n\n\tconst props = ifSchema.properties as Record<string, JSONSchema7Definition>;\n\tfor (const key of Object.keys(props)) {\n\t\tconst propDef = props[key];\n\t\tif (typeof propDef === \"boolean\") continue;\n\t\tconst prop = propDef as JSONSchema7;\n\n\t\t// Collecter si au moins un indicateur de 
discriminant est présent\n\t\tconst hasIndicator = DISCRIMINANT_INDICATORS.some((indicator) =>\n\t\t\thasOwn(prop, indicator),\n\t\t);\n\n\t\tif (hasIndicator && hasOwn(data, key)) {\n\t\t\tout[key] = data[key];\n\t\t}\n\t}\n}\n\n// ─── Branch merging (deduplicated) ───────────────────────────────────────────\n\n/**\n * Merge une branche conditionnelle (`then` ou `else`) dans le schema résolu.\n *\n * Point 4 — Fix first-writer-wins :\n * Au lieu d'ignorer les keywords déjà présents dans `resolved`,\n * on tente un merge intelligent selon le type de keyword :\n *\n * - `required` → union dédupliquée via `_.union`\n * - `properties` → merge individuel via engine.merge\n * - `dependencies` → Point 3 : union des tableaux (forme 1),\n * merge des schemas (forme 2) via `_.mapValues`\n * - Sub-schema keys → merge via engine.merge\n * - Min keys → `Math.max` (plus restrictif)\n * - Max keys → `Math.min` (plus restrictif)\n * - `uniqueItems` → `true` gagne sur `false`\n * - `pattern` / `format` → la branche gagne (plus spécifique)\n * - Autres → tentative de merge via engine, sinon la branche gagne\n *\n * Utilise lodash massivement pour chaque opération de merge.\n */\nfunction mergeBranchInto(\n\tresolved: JSONSchema7,\n\tbranchDef: JSONSchema7Definition,\n\tengine: MergeEngine,\n): void {\n\tif (typeof branchDef === \"boolean\") return;\n\n\tconst branchSchema = branchDef as JSONSchema7;\n\n\t// ── Merger required via _.union (dédupliquée automatiquement) ──\n\tif (Array.isArray(branchSchema.required)) {\n\t\tresolved.required = unionStrings(\n\t\t\tresolved.required ?? [],\n\t\t\tbranchSchema.required,\n\t\t);\n\t}\n\n\t// ── Merger properties ──\n\tif (isPlainObj(branchSchema.properties)) {\n\t\tconst branchProps = branchSchema.properties as Record<\n\t\t\tstring,\n\t\t\tJSONSchema7Definition\n\t\t>;\n\t\tconst mergedProps: Record<string, JSONSchema7Definition> = {\n\t\t\t...(resolved.properties ?? 
{}),\n\t\t};\n\t\tfor (const key of Object.keys(branchProps)) {\n\t\t\tconst branchProp = branchProps[key];\n\t\t\tif (branchProp === undefined) continue;\n\t\t\tconst existing = resolved.properties?.[key];\n\t\t\tif (\n\t\t\t\texisting !== undefined &&\n\t\t\t\ttypeof existing !== \"boolean\" &&\n\t\t\t\ttypeof branchProp !== \"boolean\"\n\t\t\t) {\n\t\t\t\tconst merged = engine.merge(\n\t\t\t\t\texisting as JSONSchema7Definition,\n\t\t\t\t\tbranchProp as JSONSchema7Definition,\n\t\t\t\t);\n\t\t\t\tmergedProps[key] = (merged ?? branchProp) as JSONSchema7Definition;\n\t\t\t} else {\n\t\t\t\tmergedProps[key] = branchProp;\n\t\t\t}\n\t\t}\n\t\tresolved.properties = mergedProps;\n\t}\n\n\t// ── Merger dependencies (Point 3) ──\n\tif (isPlainObj(branchSchema.dependencies)) {\n\t\tconst resolvedDeps = (resolved.dependencies ?? {}) as Record<\n\t\t\tstring,\n\t\t\tJSONSchema7Definition | string[]\n\t\t>;\n\t\tconst branchDeps = branchSchema.dependencies as Record<\n\t\t\tstring,\n\t\t\tJSONSchema7Definition | string[]\n\t\t>;\n\n\t\tconst acc = { ...resolvedDeps };\n\t\tfor (const depKey of Object.keys(branchDeps)) {\n\t\t\tconst branchVal = branchDeps[depKey] as\n\t\t\t\t| JSONSchema7Definition\n\t\t\t\t| string[]\n\t\t\t\t| undefined;\n\t\t\tif (branchVal === undefined) continue;\n\t\t\tconst existingVal = acc[depKey] as\n\t\t\t\t| JSONSchema7Definition\n\t\t\t\t| string[]\n\t\t\t\t| undefined;\n\n\t\t\tif (existingVal === undefined) {\n\t\t\t\t// Pas de valeur existante → copier directement\n\t\t\t\tacc[depKey] = branchVal;\n\t\t\t} else if (Array.isArray(existingVal) && Array.isArray(branchVal)) {\n\t\t\t\t// Forme 1 : union dédupliquée des tableaux de strings\n\t\t\t\tacc[depKey] = unionStrings(\n\t\t\t\t\texistingVal as string[],\n\t\t\t\t\tbranchVal as string[],\n\t\t\t\t);\n\t\t\t} else if (isPlainObj(existingVal) && isPlainObj(branchVal)) {\n\t\t\t\t// Forme 2 : merge des sous-schemas\n\t\t\t\tconst merged = engine.merge(\n\t\t\t\t\texistingVal as 
JSONSchema7Definition,\n\t\t\t\t\tbranchVal as JSONSchema7Definition,\n\t\t\t\t);\n\t\t\t\tacc[depKey] = (merged ?? branchVal) as JSONSchema7Definition;\n\t\t\t} else {\n\t\t\t\t// Types incompatibles (tableau vs schema) → la branche gagne\n\t\t\t\tacc[depKey] = branchVal;\n\t\t\t}\n\t\t}\n\t\tresolved.dependencies = acc as Record<\n\t\t\tstring,\n\t\t\tJSONSchema7Definition | string[]\n\t\t>;\n\t}\n\n\t// ── Merger les autres mots-clés (Point 4 — fix first-writer-wins) ──\n\tfor (const key of Object.keys(branchSchema) as (keyof JSONSchema7)[]) {\n\t\t// Skip les clés déjà traitées ci-dessus\n\t\tif (SPECIAL_MERGE_KEYS.has(key)) return;\n\n\t\tconst branchVal = branchSchema[key];\n\t\tconst resolvedVal = resolved[key];\n\n\t\t// Si le resolved n'a pas cette clé → copier directement\n\t\tif (resolvedVal === undefined) {\n\t\t\t(resolved as Record<string, unknown>)[key] = branchVal;\n\t\t\treturn;\n\t\t}\n\n\t\t// Si les deux ont la même valeur → rien à faire\n\t\tif (deepEqual(resolvedVal, branchVal)) return;\n\n\t\t// ── Sub-schema keys → merge via engine ──\n\t\tif (SUB_SCHEMA_KEYS.has(key)) {\n\t\t\tconst merged = engine.merge(\n\t\t\t\tresolvedVal as JSONSchema7Definition,\n\t\t\t\tbranchVal as JSONSchema7Definition,\n\t\t\t);\n\t\t\tif (merged !== null) {\n\t\t\t\t(resolved as Record<string, unknown>)[key] = merged;\n\t\t\t} else {\n\t\t\t\t// Merge impossible → la branche gagne (contexte conditionnel)\n\t\t\t\t(resolved as Record<string, unknown>)[key] = branchVal;\n\t\t\t}\n\t\t\treturn;\n\t\t}\n\n\t\t// ── Min keys → Math.max (plus restrictif) ──\n\t\tif (MIN_KEYS.has(key)) {\n\t\t\tif (typeof resolvedVal === \"number\" && typeof branchVal === \"number\") {\n\t\t\t\t(resolved as Record<string, unknown>)[key] = Math.max(\n\t\t\t\t\tresolvedVal,\n\t\t\t\t\tbranchVal,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\t(resolved as Record<string, unknown>)[key] = branchVal;\n\t\t\t}\n\t\t\treturn;\n\t\t}\n\n\t\t// ── Max keys → Math.min (plus restrictif) ──\n\t\tif 
(MAX_KEYS.has(key)) {\n\t\t\tif (typeof resolvedVal === \"number\" && typeof branchVal === \"number\") {\n\t\t\t\t(resolved as Record<string, unknown>)[key] = Math.min(\n\t\t\t\t\tresolvedVal,\n\t\t\t\t\tbranchVal,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\t(resolved as Record<string, unknown>)[key] = branchVal;\n\t\t\t}\n\t\t\treturn;\n\t\t}\n\n\t\t// ── uniqueItems → true gagne sur false ──\n\t\tif (key === \"uniqueItems\") {\n\t\t\t(resolved as Record<string, unknown>)[key] =\n\t\t\t\tresolvedVal === true || branchVal === true;\n\t\t\treturn;\n\t\t}\n\n\t\t// ── pattern / format → la branche gagne (plus spécifique au contexte) ──\n\t\tif (key === \"pattern\" || key === \"format\") {\n\t\t\t(resolved as Record<string, unknown>)[key] = branchVal;\n\t\t\treturn;\n\t\t}\n\n\t\t// ── Fallback : tentative de merge via engine pour les cas restants ──\n\t\tconst base = { [key]: resolvedVal } as JSONSchema7Definition;\n\t\tconst branch = { [key]: branchVal } as JSONSchema7Definition;\n\t\tconst merged = engine.merge(base, branch);\n\t\tif (\n\t\t\tmerged &&\n\t\t\ttypeof merged !== \"boolean\" &&\n\t\t\thasOwn(merged as object, key)\n\t\t) {\n\t\t\t(resolved as Record<string, unknown>)[key] = (\n\t\t\t\tmerged as unknown as Record<string, unknown>\n\t\t\t)[key];\n\t\t} else {\n\t\t\t// Merge échoué → la branche gagne (contexte conditionnel applicable)\n\t\t\t(resolved as Record<string, unknown>)[key] = branchVal;\n\t\t}\n\t}\n}\n\n// ─── Public API ──────────────────────────────────────────────────────────────\n\n/**\n * Résout les `if/then/else` d'un schema en évaluant le `if` contre\n * des données partielles (discriminants).\n *\n * @param schema Le schema contenant potentiellement des if/then/else\n * @param data Données partielles utilisées pour évaluer les conditions\n * @param engine Le MergeEngine pour merger les branches\n *\n * @example\n * ```ts\n * const form = {\n * type: \"object\",\n * properties: { accountType: { type: \"string\" }, ... 
},\n * if: { properties: { accountType: { const: \"business\" } } },\n * then: { required: [\"companyName\"] },\n * else: { required: [\"firstName\"] },\n * };\n *\n * const { resolved } = resolveConditions(form, { accountType: \"business\" }, engine);\n * // → resolved n'a plus de if/then/else, mais a required: [\"companyName\"]\n * ```\n */\nexport function resolveConditions(\n\tschema: JSONSchema7,\n\tdata: Record<string, unknown>,\n\tengine: MergeEngine,\n): ResolvedConditionResult {\n\tlet branch: \"then\" | \"else\" | null = null;\n\tconst discriminant: Record<string, unknown> = {};\n\n\t// ── Fast path: no conditions at all ──\n\t// If there's no `if` and no `allOf` with conditions, skip the copy entirely.\n\tconst hasTopLevelIf = schema.if !== undefined;\n\tconst hasAllOfConditions =\n\t\tArray.isArray(schema.allOf) &&\n\t\tschema.allOf.some(\n\t\t\t(e) => typeof e !== \"boolean\" && hasOwn(e as object, \"if\"),\n\t\t);\n\n\tif (!hasTopLevelIf && !hasAllOfConditions) {\n\t\t// Phase 3 only: check nested properties (resolveNestedProperties\n\t\t// already returns the original if nothing changes)\n\t\tconst resolved = resolveNestedProperties(\n\t\t\tschema,\n\t\t\tdata,\n\t\t\tengine,\n\t\t\tdiscriminant,\n\t\t);\n\t\treturn { resolved, branch, discriminant };\n\t}\n\n\t// ── Copy-on-write: only copy when mutations are needed ──\n\tlet resolved = { ...schema };\n\n\t// ── Phase 1 : Résoudre les if/then/else dans allOf ──\n\tif (hasAllOfConditions) {\n\t\tresolved = resolveAllOfConditions(resolved, data, engine, discriminant);\n\t}\n\n\t// ── Phase 2 : Résoudre le if/then/else de ce niveau ──\n\tif (resolved.if !== undefined) {\n\t\tconst ifSchema = resolved.if as JSONSchema7;\n\t\tconst matches = evaluateCondition(ifSchema, data);\n\n\t\textractDiscriminants(ifSchema, data, discriminant);\n\n\t\tconst applicableBranch = matches ? resolved.then : resolved.else;\n\t\tbranch = matches ? 
\"then\" : \"else\";\n\n\t\tif (applicableBranch) {\n\t\t\tmergeBranchInto(\n\t\t\t\tresolved,\n\t\t\t\tapplicableBranch as JSONSchema7Definition,\n\t\t\t\tengine,\n\t\t\t);\n\t\t}\n\n\t\tdelete resolved.if;\n\t\tdelete resolved.then;\n\t\tdelete resolved.else;\n\t}\n\n\t// ── Phase 3 : Récurser dans les properties ──\n\tresolved = resolveNestedProperties(resolved, data, engine, discriminant);\n\n\treturn { resolved, branch, discriminant };\n}\n\n// ─── Internal phases ─────────────────────────────────────────────────────────\n\n/**\n * Phase 1 : Parcourt les entrées `allOf` et résout celles qui contiennent\n * un `if/then/else`. Les entrées non-conditionnelles sont préservées.\n *\n * Utilise `_.reduce` pour accumuler les entrées restantes et `_.filter`\n * pour séparer les clés conditionnelles des non-conditionnelles.\n */\nfunction resolveAllOfConditions(\n\tresolved: JSONSchema7,\n\tdata: Record<string, unknown>,\n\tengine: MergeEngine,\n\tdiscriminant: Record<string, unknown>,\n): JSONSchema7 {\n\tif (!Array.isArray(resolved.allOf)) return resolved;\n\n\tconst remainingAllOf: JSONSchema7Definition[] = [];\n\n\tfor (const entry of resolved.allOf) {\n\t\tif (typeof entry === \"boolean\") {\n\t\t\tremainingAllOf.push(entry);\n\t\t\tcontinue;\n\t\t}\n\n\t\tconst subSchema = entry as JSONSchema7;\n\n\t\tif (subSchema.if === undefined) {\n\t\t\tremainingAllOf.push(entry);\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Résoudre la condition de cette entrée allOf\n\t\tconst ifSchema = subSchema.if as JSONSchema7;\n\t\tconst matches = evaluateCondition(ifSchema, data);\n\n\t\textractDiscriminants(ifSchema, data, discriminant);\n\n\t\tconst applicableBranch = matches ? 
subSchema.then : subSchema.else;\n\n\t\tif (applicableBranch) {\n\t\t\tmergeBranchInto(\n\t\t\t\tresolved,\n\t\t\t\tapplicableBranch as JSONSchema7Definition,\n\t\t\t\tengine,\n\t\t\t);\n\t\t}\n\n\t\t// Garder les parties non-conditionnelles de l'entrée allOf\n\t\tconst remaining = omitKeys(\n\t\t\tsubSchema as unknown as Record<string, unknown>,\n\t\t\t[\"if\", \"then\", \"else\"],\n\t\t);\n\t\tif (Object.keys(remaining).length > 0) {\n\t\t\tremainingAllOf.push(remaining as JSONSchema7);\n\t\t}\n\t}\n\n\tresolved = { ...resolved };\n\tif (remainingAllOf.length === 0) {\n\t\tdelete resolved.allOf;\n\t} else {\n\t\tresolved.allOf = remainingAllOf;\n\t}\n\n\treturn resolved;\n}\n\n/**\n * Phase 3 : Récurse dans les `properties` du schema résolu pour résoudre\n * les conditions imbriquées (ex: un objet dont une propriété a un if/then/else).\n *\n * Utilise `_.mapValues` pour transformer chaque propriété en une seule passe,\n * et `_.forEach` pour remonter les discriminants imbriqués.\n */\nfunction resolveNestedProperties(\n\tresolved: JSONSchema7,\n\tdata: Record<string, unknown>,\n\tengine: MergeEngine,\n\tdiscriminant: Record<string, unknown>,\n): JSONSchema7 {\n\tif (!isPlainObj(resolved.properties)) return resolved;\n\n\tconst props = resolved.properties as Record<string, JSONSchema7Definition>;\n\tconst propKeys = Object.keys(props);\n\tlet changed = false;\n\tconst resolvedProps: Record<string, JSONSchema7Definition> = {};\n\n\tfor (const key of propKeys) {\n\t\tconst propDef = props[key];\n\t\tif (propDef === undefined) continue;\n\t\tif (typeof propDef === \"boolean\") {\n\t\t\tresolvedProps[key] = propDef;\n\t\t\tcontinue;\n\t\t}\n\n\t\tconst propSchema = propDef as JSONSchema7;\n\t\tconst hasConditions =\n\t\t\tpropSchema.if !== undefined ||\n\t\t\t(Array.isArray(propSchema.allOf) &&\n\t\t\t\tpropSchema.allOf.some(\n\t\t\t\t\t(e) => typeof e !== \"boolean\" && hasOwn(e as object, \"if\"),\n\t\t\t\t));\n\n\t\tif (!hasConditions) {\n\t\t\tresolvedProps[key] = 
propDef;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Données imbriquées disponibles → résoudre récursivement\n\t\tconst nestedData = isPlainObj(data[key])\n\t\t\t? (data[key] as Record<string, unknown>)\n\t\t\t: {};\n\n\t\tconst nested = resolveConditions(propSchema, nestedData, engine);\n\n\t\t// Remonter les discriminants imbriqués avec prefix\n\t\tfor (const dk of Object.keys(nested.discriminant)) {\n\t\t\tdiscriminant[`${key}.${dk}`] = nested.discriminant[dk];\n\t\t}\n\n\t\tresolvedProps[key] = nested.resolved;\n\t\tchanged = true;\n\t}\n\n\treturn changed ? { ...resolved, properties: resolvedProps } : resolved;\n}\n"],"names":["resolveConditions","SPECIAL_MERGE_KEYS","Set","SUB_SCHEMA_KEYS","MIN_KEYS","MAX_KEYS","matchesType","value","type","undefined","types","Array","isArray","actualType","inferType","some","t","evaluateNumericConstraints","prop","minimum","maximum","exclusiveMinimum","exclusiveMaximum","multipleOf","patternRegexCache","Map","getOrCompileRegex","pattern","regex","get","RegExp","set","evaluateStringConstraints","minLength","length","maxLength","test","evaluateArrayConstraints","minItems","maxItems","uniqueItems","len","i","j","deepEqual","evaluateCondition","ifSchema","data","isPlainObj","properties","propsOk","Object","keys","every","key","propDef","hasOwn","const","enum","v","format","formatResult","validateFormat","required","allRequired","allOf","allMatch","entry","anyOf","anyMatch","oneOf","matchCount","matches","not","notResult","DISCRIMINANT_INDICATORS","extractDiscriminants","out","props","hasIndicator","indicator","mergeBranchInto","resolved","branchDef","engine","branchSchema","unionStrings","branchProps","mergedProps","branchProp","existing","merged","merge","dependencies","resolvedDeps","branchDeps","acc","depKey","branchVal","existingVal","has","resolvedVal","Math","max","min","base","branch","schema","discriminant","hasTopLevelIf","if","hasAllOfConditions","e","resolveNestedProperties","resolveAllOfConditions","applicableBranch","then","el
se","remainingAllOf","push","subSchema","remaining","omitKeys","propKeys","changed","resolvedProps","propSchema","hasConditions","nestedData","nested","dk"],"mappings":"oGAimBgBA,2DAAAA,oDAhmBe,gDAEL,qCAE4C,WAsCtE,MAAMC,mBAAqB,IAAIC,IAAI,CAAC,WAAY,aAAc,eAAe,EAG7E,MAAMC,gBAAkB,IAAID,IAAI,CAC/B,uBACA,QACA,WACA,gBACA,MACA,EAGD,MAAME,SAAW,IAAIF,IAAI,CACxB,UACA,mBACA,YACA,WACA,gBACA,EAGD,MAAMG,SAAW,IAAIH,IAAI,CACxB,UACA,mBACA,YACA,WACA,gBACA,EAOD,SAASI,YAAYC,KAAc,CAAEC,IAAyB,EAC7D,GAAIA,OAASC,UAAW,OAAO,KAE/B,MAAMC,MAAQC,MAAMC,OAAO,CAACJ,MAAQA,KAAO,CAACA,KAAK,CACjD,MAAMK,WAAaC,GAAAA,qBAAS,EAACP,OAE7B,OAAOG,MAAMK,IAAI,CAChB,AAACC,GAAMA,IAAMH,YAAeG,IAAM,UAAYH,aAAe,UAE/D,CAMA,SAASI,2BAA2BV,KAAa,CAAEW,IAAiB,EACnE,GAAIA,KAAKC,OAAO,GAAKV,WAAa,CAAEF,CAAAA,OAASW,KAAKC,OAAO,AAAD,EAAI,OAAO,MACnE,GAAID,KAAKE,OAAO,GAAKX,WAAa,CAAEF,CAAAA,OAASW,KAAKE,OAAO,AAAD,EAAI,OAAO,MACnE,GACCF,KAAKG,gBAAgB,GAAKZ,WAC1B,CAAEF,CAAAA,MAASW,KAAKG,gBAAgB,AAAU,EAE1C,OAAO,MACR,GACCH,KAAKI,gBAAgB,GAAKb,WAC1B,CAAEF,CAAAA,MAASW,KAAKI,gBAAgB,AAAU,EAE1C,OAAO,MACR,GAAIJ,KAAKK,UAAU,GAAKd,WAAaF,MAAQW,KAAKK,UAAU,GAAK,EAChE,OAAO,MACR,OAAO,IACR,CAOA,MAAMC,kBAAoB,IAAIC,IAE9B,SAASC,kBAAkBC,OAAe,EACzC,IAAIC,MAAQJ,kBAAkBK,GAAG,CAACF,SAClC,GAAIC,QAAUnB,UAAW,CACxBmB,MAAQ,IAAIE,OAAOH,SACnBH,kBAAkBO,GAAG,CAACJ,QAASC,MAChC,CACA,OAAOA,KACR,CAEA,SAASI,0BAA0BzB,KAAa,CAAEW,IAAiB,EAClE,GAAIA,KAAKe,SAAS,GAAKxB,WAAa,CAAEF,CAAAA,MAAM2B,MAAM,EAAIhB,KAAKe,SAAS,AAAD,EAClE,OAAO,MACR,GAAIf,KAAKiB,SAAS,GAAK1B,WAAa,CAAEF,CAAAA,MAAM2B,MAAM,EAAIhB,KAAKiB,SAAS,AAAD,EAClE,OAAO,MACR,GACCjB,KAAKS,OAAO,GAAKlB,WACjB,CAACiB,kBAAkBR,KAAKS,OAAO,EAAES,IAAI,CAAC7B,OAEtC,OAAO,MACR,OAAO,IACR,CAMA,SAAS8B,yBACR9B,KAAgB,CAChBW,IAAiB,EAEjB,GAAIA,KAAKoB,QAAQ,GAAK7B,WAAa,CAAEF,CAAAA,MAAM2B,MAAM,EAAIhB,KAAKoB,QAAQ,AAAD,EAChE,OAAO,MACR,GAAIpB,KAAKqB,QAAQ,GAAK9B,WAAa,CAAEF,CAAAA,MAAM2B,MAAM,EAAIhB,KAAKqB,QAAQ,AAAD,EAChE,OAAO,MACR,GAAIrB,KAAKsB,WAAW,GAAK,KAAM,CAG9B,MAAMC,IAAMlC,MAAM2B,MAAM,CACxB,IAAK,IAAIQ,EAAI,EAAGA,EAAID,IAAKC,IAAK,CAC7B,IAAK,IAAIC,EAAID,EAAI,EA
AGC,EAAIF,IAAKE,IAAK,CACjC,GAAIC,GAAAA,gBAAS,EAACrC,KAAK,CAACmC,EAAE,CAAEnC,KAAK,CAACoC,EAAE,EAAG,OAAO,KAC3C,CACD,CACD,CACA,OAAO,IACR,CAmBA,SAASE,kBACRC,QAAqB,CACrBC,IAA6B,EAE7B,GAAIC,GAAAA,iBAAU,EAACF,SAASG,UAAU,EAAG,CACpC,MAAMC,QAAUC,OAAOC,IAAI,CAACN,SAASG,UAAU,EAAEI,KAAK,CAAC,AAACC,MACvD,MAAMC,QAAUT,SAASG,UAAU,EAAE,CAACK,IAAI,CAC1C,GAAI,OAAOC,UAAY,UAAW,OAAO,KACzC,MAAMrC,KAAOqC,QACb,MAAMhD,MAAQwC,IAAI,CAACO,IAAI,CAMvB,GAAI/C,QAAUE,UAAW,OAAO,KAGhC,GAAI+C,GAAAA,aAAM,EAACtC,KAAM,SAAU,CAC1B,GAAI,CAAC0B,GAAAA,gBAAS,EAACrC,MAAOW,KAAKuC,KAAK,EAAG,OAAO,KAC3C,CAGA,GAAID,GAAAA,aAAM,EAACtC,KAAM,QAAS,CACzB,GAAI,CAACA,KAAKwC,IAAI,EAAE3C,KAAK,AAAC4C,GAAMf,GAAAA,gBAAS,EAACe,EAAGpD,QAAS,OAAO,KAC1D,CAGA,GAAIiD,GAAAA,aAAM,EAACtC,KAAM,SAAWX,QAAUE,UAAW,CAChD,GAAI,CAACH,YAAYC,MAAOW,KAAKV,IAAI,EAAG,OAAO,KAC5C,CAQA,GAAI,OAAOD,QAAU,SAAU,CAC9B,GAAI,CAACU,2BAA2BV,MAAOW,MAAO,OAAO,KACtD,CAEA,GAAI,OAAOX,QAAU,SAAU,CAC9B,GAAI,CAACyB,0BAA0BzB,MAAOW,MAAO,OAAO,KACrD,CAEA,GAAIP,MAAMC,OAAO,CAACL,OAAQ,CACzB,GAAI,CAAC8B,yBAAyB9B,MAAoBW,MAAO,OAAO,KACjE,CAMA,GAAIA,KAAK0C,MAAM,GAAKnD,WAAa,OAAOF,QAAU,SAAU,CAC3D,MAAMsD,aAAeC,GAAAA,+BAAc,EAACvD,MAAOW,KAAK0C,MAAM,EACtD,GAAIC,eAAiB,MAAO,OAAO,KAEpC,CAQA,GAAIb,GAAAA,iBAAU,EAAC9B,KAAK+B,UAAU,GAAKtC,MAAMC,OAAO,CAACM,KAAK6C,QAAQ,EAAG,CAChE,GAAIf,GAAAA,iBAAU,EAACzC,OAAQ,CACtB,GAAI,CAACsC,kBAAkB3B,KAAMX,OAAmC,CAC/D,OAAO,KACR,CACD,CAED,CAEA,OAAO,IACR,GACA,GAAI,CAAC2C,QAAS,OAAO,KACtB,CAGA,GAAIvC,MAAMC,OAAO,CAACkC,SAASiB,QAAQ,EAAG,CACrC,MAAMC,YAAclB,SAASiB,QAAQ,CAACV,KAAK,CAAC,AAACC,KAC5CE,GAAAA,aAAM,EAACT,KAAMO,MAEd,GAAI,CAACU,YAAa,OAAO,KAC1B,CAIA,GAAIrD,MAAMC,OAAO,CAACkC,SAASmB,KAAK,EAAG,CAClC,MAAMC,SAAWpB,SAASmB,KAAK,CAACZ,KAAK,CAAC,AAACc,QACtC,GAAI,OAAOA,QAAU,UAAW,OAAOA,MACvC,OAAOtB,kBAAkBsB,MAAsBpB,KAChD,GACA,GAAI,CAACmB,SAAU,OAAO,KACvB,CAIA,GAAIvD,MAAMC,OAAO,CAACkC,SAASsB,KAAK,EAAG,CAClC,MAAMC,SAAWvB,SAASsB,KAAK,CAACrD,IAAI,CAAC,AAACoD,QACrC,GAAI,OAAOA,QAAU,UAAW,OAAOA,MACvC,OAAOtB,kBAAkBsB,MAAsBpB,KAChD,GACA,GAAI,CAACsB,SAAU,OAAO,KACvB,CAIA,GAAI1D,MAAMC,OAAO,CAAC
kC,SAASwB,KAAK,EAAG,CAClC,IAAIC,WAAa,EACjB,IAAK,MAAMJ,SAASrB,SAASwB,KAAK,CAAE,CACnC,MAAME,QACL,OAAOL,QAAU,UACdA,MACAtB,kBAAkBsB,MAAsBpB,MAC5C,GAAIyB,QAASD,aACb,GAAIA,WAAa,EAAG,KACrB,CACA,GAAIA,aAAe,EAAG,OAAO,KAC9B,CAIA,GACCf,GAAAA,aAAM,EAACV,SAAU,QACjBE,GAAAA,iBAAU,EAACF,SAAS2B,GAAG,GACvB,OAAO3B,SAAS2B,GAAG,GAAK,UACvB,CACD,MAAMC,UAAY7B,kBAAkBC,SAAS2B,GAAG,CAAiB1B,MACjE,GAAI2B,UAAW,OAAO,KACvB,CAEA,OAAO,IACR,CAUA,MAAMC,wBAA0B,CAC/B,QACA,OACA,UACA,UACA,mBACA,mBACA,UACA,YACA,YACA,aACA,WACA,WACA,SACA,CAYD,SAASC,qBACR9B,QAAqB,CACrBC,IAA6B,CAC7B8B,GAA4B,EAE5B,GAAI,CAAC7B,GAAAA,iBAAU,EAACF,SAASG,UAAU,EAAG,OAEtC,MAAM6B,MAAQhC,SAASG,UAAU,CACjC,IAAK,MAAMK,OAAOH,OAAOC,IAAI,CAAC0B,OAAQ,CACrC,MAAMvB,QAAUuB,KAAK,CAACxB,IAAI,CAC1B,GAAI,OAAOC,UAAY,UAAW,SAClC,MAAMrC,KAAOqC,QAGb,MAAMwB,aAAeJ,wBAAwB5D,IAAI,CAAC,AAACiE,WAClDxB,GAAAA,aAAM,EAACtC,KAAM8D,YAGd,GAAID,cAAgBvB,GAAAA,aAAM,EAACT,KAAMO,KAAM,CACtCuB,GAAG,CAACvB,IAAI,CAAGP,IAAI,CAACO,IAAI,AACrB,CACD,CACD,CAwBA,SAAS2B,gBACRC,QAAqB,CACrBC,SAAgC,CAChCC,MAAmB,EAEnB,GAAI,OAAOD,YAAc,UAAW,OAEpC,MAAME,aAAeF,UAGrB,GAAIxE,MAAMC,OAAO,CAACyE,aAAatB,QAAQ,EAAG,CACzCmB,SAASnB,QAAQ,CAAGuB,GAAAA,mBAAY,EAC/BJ,SAASnB,QAAQ,EAAI,EAAE,CACvBsB,aAAatB,QAAQ,CAEvB,CAGA,GAAIf,GAAAA,iBAAU,EAACqC,aAAapC,UAAU,EAAG,CACxC,MAAMsC,YAAcF,aAAapC,UAAU,CAI3C,MAAMuC,YAAqD,CAC1D,GAAIN,SAASjC,UAAU,EAAI,CAAC,CAAC,AAC9B,EACA,IAAK,MAAMK,OAAOH,OAAOC,IAAI,CAACmC,aAAc,CAC3C,MAAME,WAAaF,WAAW,CAACjC,IAAI,CACnC,GAAImC,aAAehF,UAAW,SAC9B,MAAMiF,SAAWR,SAASjC,UAAU,EAAE,CAACK,IAAI,CAC3C,GACCoC,WAAajF,WACb,OAAOiF,WAAa,WACpB,OAAOD,aAAe,UACrB,CACD,MAAME,OAASP,OAAOQ,KAAK,CAC1BF,SACAD,WAEDD,CAAAA,WAAW,CAAClC,IAAI,CAAIqC,QAAUF,UAC/B,KAAO,CACND,WAAW,CAAClC,IAAI,CAAGmC,UACpB,CACD,CACAP,SAASjC,UAAU,CAAGuC,WACvB,CAGA,GAAIxC,GAAAA,iBAAU,EAACqC,aAAaQ,YAAY,EAAG,CAC1C,MAAMC,aAAgBZ,SAASW,YAAY,EAAI,CAAC,EAIhD,MAAME,WAAaV,aAAaQ,YAAY,CAK5C,MAAMG,IAAM,CAAE,GAAGF,YAAY,AAAC,EAC9B,IAAK,MAAMG,UAAU9C,OAAOC,IAAI,CAAC2C,YAAa,CAC7C,MAAMG,UAAYH,UAAU,CAACE,OAAO,CAIpC,GAAIC,YAAczF,UAAW,SAC7B,MAAM0F,YAAcH,GAAG,CAACC
,OAAO,CAK/B,GAAIE,cAAgB1F,UAAW,CAE9BuF,GAAG,CAACC,OAAO,CAAGC,SACf,MAAO,GAAIvF,MAAMC,OAAO,CAACuF,cAAgBxF,MAAMC,OAAO,CAACsF,WAAY,CAElEF,GAAG,CAACC,OAAO,CAAGX,GAAAA,mBAAY,EACzBa,YACAD,UAEF,MAAO,GAAIlD,GAAAA,iBAAU,EAACmD,cAAgBnD,GAAAA,iBAAU,EAACkD,WAAY,CAE5D,MAAMP,OAASP,OAAOQ,KAAK,CAC1BO,YACAD,UAEDF,CAAAA,GAAG,CAACC,OAAO,CAAIN,QAAUO,SAC1B,KAAO,CAENF,GAAG,CAACC,OAAO,CAAGC,SACf,CACD,CACAhB,SAASW,YAAY,CAAGG,GAIzB,CAGA,IAAK,MAAM1C,OAAOH,OAAOC,IAAI,CAACiC,cAAwC,CAErE,GAAIpF,mBAAmBmG,GAAG,CAAC9C,KAAM,OAEjC,MAAM4C,UAAYb,YAAY,CAAC/B,IAAI,CACnC,MAAM+C,YAAcnB,QAAQ,CAAC5B,IAAI,CAGjC,GAAI+C,cAAgB5F,UAAW,CAC9B,AAACyE,QAAoC,CAAC5B,IAAI,CAAG4C,UAC7C,MACD,CAGA,GAAItD,GAAAA,gBAAS,EAACyD,YAAaH,WAAY,OAGvC,GAAI/F,gBAAgBiG,GAAG,CAAC9C,KAAM,CAC7B,MAAMqC,OAASP,OAAOQ,KAAK,CAC1BS,YACAH,WAED,GAAIP,SAAW,KAAM,CACpB,AAACT,QAAoC,CAAC5B,IAAI,CAAGqC,MAC9C,KAAO,CAEN,AAACT,QAAoC,CAAC5B,IAAI,CAAG4C,SAC9C,CACA,MACD,CAGA,GAAI9F,SAASgG,GAAG,CAAC9C,KAAM,CACtB,GAAI,OAAO+C,cAAgB,UAAY,OAAOH,YAAc,SAAU,CACrE,AAAChB,QAAoC,CAAC5B,IAAI,CAAGgD,KAAKC,GAAG,CACpDF,YACAH,UAEF,KAAO,CACN,AAAChB,QAAoC,CAAC5B,IAAI,CAAG4C,SAC9C,CACA,MACD,CAGA,GAAI7F,SAAS+F,GAAG,CAAC9C,KAAM,CACtB,GAAI,OAAO+C,cAAgB,UAAY,OAAOH,YAAc,SAAU,CACrE,AAAChB,QAAoC,CAAC5B,IAAI,CAAGgD,KAAKE,GAAG,CACpDH,YACAH,UAEF,KAAO,CACN,AAAChB,QAAoC,CAAC5B,IAAI,CAAG4C,SAC9C,CACA,MACD,CAGA,GAAI5C,MAAQ,cAAe,CAC1B,AAAC4B,QAAoC,CAAC5B,IAAI,CACzC+C,cAAgB,MAAQH,YAAc,KACvC,MACD,CAGA,GAAI5C,MAAQ,WAAaA,MAAQ,SAAU,CAC1C,AAAC4B,QAAoC,CAAC5B,IAAI,CAAG4C,UAC7C,MACD,CAGA,MAAMO,KAAO,CAAE,CAACnD,IAAI,CAAE+C,WAAY,EAClC,MAAMK,OAAS,CAAE,CAACpD,IAAI,CAAE4C,SAAU,EAClC,MAAMP,OAASP,OAAOQ,KAAK,CAACa,KAAMC,QAClC,GACCf,QACA,OAAOA,SAAW,WAClBnC,GAAAA,aAAM,EAACmC,OAAkBrC,KACxB,CACD,AAAC4B,QAAoC,CAAC5B,IAAI,CAAG,AAC5CqC,MACA,CAACrC,IAAI,AACP,KAAO,CAEN,AAAC4B,QAAoC,CAAC5B,IAAI,CAAG4C,SAC9C,CACD,CACD,CA0BO,SAASlG,kBACf2G,MAAmB,CACnB5D,IAA6B,CAC7BqC,MAAmB,EAEnB,IAAIsB,OAAiC,KACrC,MAAME,aAAwC,CAAC,EAI/C,MAAMC,cAAgBF,OAAOG,EAAE,GAAKrG,UACpC,MAAMsG,mBACLpG,MAAMC,OAAO,CAAC+F,OAAO1C,KAAK,GAC1B0C,OAAO1C,KAAK,
CAAClD,IAAI,CAChB,AAACiG,GAAM,OAAOA,IAAM,WAAaxD,GAAAA,aAAM,EAACwD,EAAa,OAGvD,GAAI,CAACH,eAAiB,CAACE,mBAAoB,CAG1C,MAAM7B,SAAW+B,wBAChBN,OACA5D,KACAqC,OACAwB,cAED,MAAO,CAAE1B,SAAUwB,OAAQE,YAAa,CACzC,CAGA,IAAI1B,SAAW,CAAE,GAAGyB,MAAM,AAAC,EAG3B,GAAII,mBAAoB,CACvB7B,SAAWgC,uBAAuBhC,SAAUnC,KAAMqC,OAAQwB,aAC3D,CAGA,GAAI1B,SAAS4B,EAAE,GAAKrG,UAAW,CAC9B,MAAMqC,SAAWoC,SAAS4B,EAAE,CAC5B,MAAMtC,QAAU3B,kBAAkBC,SAAUC,MAE5C6B,qBAAqB9B,SAAUC,KAAM6D,cAErC,MAAMO,iBAAmB3C,QAAUU,SAASkC,IAAI,CAAGlC,SAASmC,IAAI,CAChEX,OAASlC,QAAU,OAAS,OAE5B,GAAI2C,iBAAkB,CACrBlC,gBACCC,SACAiC,iBACA/B,OAEF,CAEA,OAAOF,SAAS4B,EAAE,AAClB,QAAO5B,SAASkC,IAAI,AACpB,QAAOlC,SAASmC,IAAI,AACrB,CAGAnC,SAAW+B,wBAAwB/B,SAAUnC,KAAMqC,OAAQwB,cAE3D,MAAO,CAAE1B,SAAUwB,OAAQE,YAAa,CACzC,CAWA,SAASM,uBACRhC,QAAqB,CACrBnC,IAA6B,CAC7BqC,MAAmB,CACnBwB,YAAqC,EAErC,GAAI,CAACjG,MAAMC,OAAO,CAACsE,SAASjB,KAAK,EAAG,OAAOiB,SAE3C,MAAMoC,eAA0C,EAAE,CAElD,IAAK,MAAMnD,SAASe,SAASjB,KAAK,CAAE,CACnC,GAAI,OAAOE,QAAU,UAAW,CAC/BmD,eAAeC,IAAI,CAACpD,OACpB,QACD,CAEA,MAAMqD,UAAYrD,MAElB,GAAIqD,UAAUV,EAAE,GAAKrG,UAAW,CAC/B6G,eAAeC,IAAI,CAACpD,OACpB,QACD,CAGA,MAAMrB,SAAW0E,UAAUV,EAAE,CAC7B,MAAMtC,QAAU3B,kBAAkBC,SAAUC,MAE5C6B,qBAAqB9B,SAAUC,KAAM6D,cAErC,MAAMO,iBAAmB3C,QAAUgD,UAAUJ,IAAI,CAAGI,UAAUH,IAAI,CAElE,GAAIF,iBAAkB,CACrBlC,gBACCC,SACAiC,iBACA/B,OAEF,CAGA,MAAMqC,UAAYC,GAAAA,eAAQ,EACzBF,UACA,CAAC,KAAM,OAAQ,OAAO,EAEvB,GAAIrE,OAAOC,IAAI,CAACqE,WAAWvF,MAAM,CAAG,EAAG,CACtCoF,eAAeC,IAAI,CAACE,UACrB,CACD,CAEAvC,SAAW,CAAE,GAAGA,QAAQ,AAAC,EACzB,GAAIoC,eAAepF,MAAM,GAAK,EAAG,CAChC,OAAOgD,SAASjB,KAAK,AACtB,KAAO,CACNiB,SAASjB,KAAK,CAAGqD,cAClB,CAEA,OAAOpC,QACR,CASA,SAAS+B,wBACR/B,QAAqB,CACrBnC,IAA6B,CAC7BqC,MAAmB,CACnBwB,YAAqC,EAErC,GAAI,CAAC5D,GAAAA,iBAAU,EAACkC,SAASjC,UAAU,EAAG,OAAOiC,SAE7C,MAAMJ,MAAQI,SAASjC,UAAU,CACjC,MAAM0E,SAAWxE,OAAOC,IAAI,CAAC0B,OAC7B,IAAI8C,QAAU,MACd,MAAMC,cAAuD,CAAC,EAE9D,IAAK,MAAMvE,OAAOqE,SAAU,CAC3B,MAAMpE,QAAUuB,KAAK,CAACxB,IAAI,CAC1B,GAAIC,UAAY9C,UAAW,SAC3B,GAAI,OAAO8C,UAAY,UAAW,CACjCsE,aAAa,CAACvE,IAAI,CAAGC,QACrB,QACD,CAEA,MA
AMuE,WAAavE,QACnB,MAAMwE,cACLD,WAAWhB,EAAE,GAAKrG,WACjBE,MAAMC,OAAO,CAACkH,WAAW7D,KAAK,GAC9B6D,WAAW7D,KAAK,CAAClD,IAAI,CACpB,AAACiG,GAAM,OAAOA,IAAM,WAAaxD,GAAAA,aAAM,EAACwD,EAAa,OAGxD,GAAI,CAACe,cAAe,CACnBF,aAAa,CAACvE,IAAI,CAAGC,QACrB,QACD,CAGA,MAAMyE,WAAahF,GAAAA,iBAAU,EAACD,IAAI,CAACO,IAAI,EACnCP,IAAI,CAACO,IAAI,CACV,CAAC,EAEJ,MAAM2E,OAASjI,kBAAkB8H,WAAYE,WAAY5C,QAGzD,IAAK,MAAM8C,MAAM/E,OAAOC,IAAI,CAAC6E,OAAOrB,YAAY,EAAG,CAClDA,YAAY,CAAC,CAAC,EAAEtD,IAAI,CAAC,EAAE4E,GAAG,CAAC,CAAC,CAAGD,OAAOrB,YAAY,CAACsB,GAAG,AACvD,CAEAL,aAAa,CAACvE,IAAI,CAAG2E,OAAO/C,QAAQ,CACpC0C,QAAU,IACX,CAEA,OAAOA,QAAU,CAAE,GAAG1C,QAAQ,CAAEjC,WAAY4E,aAAc,EAAI3C,QAC/D"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports,"__esModule",{value:true});function _export(target,all){for(var name in all)Object.defineProperty(target,name,{enumerable:true,get:Object.getOwnPropertyDescriptor(all,name).get})}_export(exports,{get FORMAT_SUPERSETS(){return FORMAT_SUPERSETS},get KNOWN_FORMATS(){return KNOWN_FORMATS},get isFormatSubset(){return isFormatSubset},get isKnownFormat(){return isKnownFormat},get validateFormat(){return validateFormat}});const _classvalidator=require("class-validator");const TIME_REGEX=/^\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})?$/;const DATE_REGEX=/^\d{4}-\d{2}-\d{2}$/;const JSON_POINTER_REGEX=/^(\/([^~/]|~[01])*)*$/;const RELATIVE_JSON_POINTER_REGEX=/^\d+(#|(\/([^~/]|~[01])*)*)$/;const _URI_TEMPLATE_REGEX=/\{[^}]+\}/;const KNOWN_FORMATS=new Set(["date-time","date","time","email","idn-email","hostname","idn-hostname","ipv4","ipv6","uri","uri-reference","iri","iri-reference","uri-template","uuid","json-pointer","relative-json-pointer","regex"]);const FORMAT_SUPERSETS={email:["idn-email"],hostname:["idn-hostname"],uri:["iri"],"uri-reference":["iri-reference"]};const FORMAT_VALIDATORS={"date-time":value=>{return(0,_classvalidator.isISO8601)(value,{strict:true})},date:value=>{if(!DATE_REGEX.test(value))return false;const d=new Date(`${value}T00:00:00Z`);return!Number.isNaN(d.getTime())&&value===d.toISOString().slice(0,10)},time:value=>{return 
TIME_REGEX.test(value)},email:value=>{return(0,_classvalidator.isEmail)(value)},"idn-email":value=>{return(0,_classvalidator.isEmail)(value)},hostname:value=>{return(0,_classvalidator.isFQDN)(value,{require_tld:false})},"idn-hostname":value=>{return(0,_classvalidator.isFQDN)(value,{require_tld:false})},ipv4:value=>{return(0,_classvalidator.isIP)(value,4)},ipv6:value=>{return(0,_classvalidator.isIP)(value,6)},uri:value=>{return(0,_classvalidator.isURL)(value,{require_protocol:true})},"uri-reference":value=>{return(0,_classvalidator.isURL)(value,{require_protocol:false})},iri:value=>{return(0,_classvalidator.isURL)(value,{require_protocol:true})},"iri-reference":value=>{return(0,_classvalidator.isURL)(value,{require_protocol:false})},"uri-template":value=>{let inBrace=false;for(const ch of value){if(ch==="{"){if(inBrace)return false;inBrace=true}else if(ch==="}"){if(!inBrace)return false;inBrace=false}}return!inBrace},uuid:value=>{return(0,_classvalidator.isUUID)(value)},"json-pointer":value=>{if(value==="")return true;return JSON_POINTER_REGEX.test(value)},"relative-json-pointer":value=>{return RELATIVE_JSON_POINTER_REGEX.test(value)},regex:value=>{try{new RegExp(value);return true}catch{return false}}};function isKnownFormat(format){return KNOWN_FORMATS.has(format)}function validateFormat(value,format){if(typeof value!=="string")return true;const validator=FORMAT_VALIDATORS[format];if(!validator)return null;return validator(value)}function isFormatSubset(subFormat,supFormat){if(subFormat===supFormat)return true;const supersets=FORMAT_SUPERSETS[subFormat];if(supersets?.includes(supFormat)){return true}return null}
|
|
2
|
+
//# sourceMappingURL=format-validator.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/format-validator.ts"],"sourcesContent":["import {\n\tisEmail,\n\tisFQDN,\n\tisIP,\n\tisISO8601,\n\tisURL,\n\tisUUID,\n} from \"class-validator\";\n\n// ─── Format Validator ─────────────────────────────────────────────────────────\n//\n// Valide les valeurs contre les formats JSON Schema Draft-07 en utilisant\n// les fonctions utilitaires de `class-validator`.\n//\n// ⚠️ Ce module ne gère PAS la relation format ⊆ type.\n// Cette relation est déjà correctement gérée par l'approche merge :\n// - { format: \"email\" } ⊆ { type: \"string\" } → true (merge ne change rien)\n// - { type: \"string\" } ⊄ { format: \"email\" } → false (merge ajoute format)\n//\n// Ce module gère UNIQUEMENT :\n// 1. La validation d'une valeur runtime contre un format (evaluateCondition)\n// 2. La compatibilité entre deux formats différents (format-vs-format)\n//\n// Expose :\n// - `validateFormat(value, format)` → validation runtime d'une valeur\n// - `isFormatSubset(sub, sup)` → compatibilité statique format-vs-format\n// - `isKnownFormat(format)` → vérifie si le format est supporté\n// - `FORMAT_SUPERSETS` → hiérarchie d'inclusion entre formats\n\n// ─── Regex patterns ──────────────────────────────────────────────────────────\n\n/** Regex pour le format `time` (HH:MM:SS avec offset optionnel) */\nconst TIME_REGEX = /^\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?(Z|[+-]\\d{2}:\\d{2})?$/;\n\n/** Regex pour le format `date` (YYYY-MM-DD strict) */\nconst DATE_REGEX = /^\\d{4}-\\d{2}-\\d{2}$/;\n\n/** Regex pour le format `json-pointer` (RFC 6901) */\nconst JSON_POINTER_REGEX = /^(\\/([^~/]|~[01])*)*$/;\n\n/** Regex pour le format `relative-json-pointer` (extension Draft-07) */\nconst RELATIVE_JSON_POINTER_REGEX = /^\\d+(#|(\\/([^~/]|~[01])*)*)$/;\n\n/** Regex pour le format `uri-template` (RFC 6570 — vérification basique) */\nconst _URI_TEMPLATE_REGEX = /\\{[^}]+\\}/;\n\n// ─── Known formats ──────────────────────────────────────────────────────────\n\n/** Formats reconnus 
par le validateur */\nexport const KNOWN_FORMATS: ReadonlySet<string> = new Set([\n\t\"date-time\",\n\t\"date\",\n\t\"time\",\n\t\"email\",\n\t\"idn-email\",\n\t\"hostname\",\n\t\"idn-hostname\",\n\t\"ipv4\",\n\t\"ipv6\",\n\t\"uri\",\n\t\"uri-reference\",\n\t\"iri\",\n\t\"iri-reference\",\n\t\"uri-template\",\n\t\"uuid\",\n\t\"json-pointer\",\n\t\"relative-json-pointer\",\n\t\"regex\",\n]);\n\n// ─── Format hierarchy ────────────────────────────────────────────────────────\n\n/**\n * Hiérarchie d'inclusion ENTRE FORMATS (pas format-vs-type).\n *\n * `FORMAT_SUPERSETS[format]` = liste des formats qui sont des sur-ensembles\n * de `format` (i.e., toute valeur valide pour `format` est aussi valide\n * pour chacun des sur-ensembles).\n *\n * Cette hiérarchie ne concerne QUE les comparaisons format-vs-format.\n * La relation format ⊆ type (ex: email ⊆ string) est gérée nativement\n * par le merge engine et n'a pas besoin d'être modélisée ici.\n *\n * En pratique, la plupart des formats sont **incomparables** (pas de relation d'inclusion).\n * Seule l'identité (même format) et les quelques relations ci-dessous sont garanties.\n */\nexport const FORMAT_SUPERSETS: Record<string, string[]> = {\n\temail: [\"idn-email\"], // email ⊆ idn-email (toute email ASCII est une idn-email)\n\thostname: [\"idn-hostname\"], // hostname ⊆ idn-hostname\n\turi: [\"iri\"], // uri ⊆ iri (toute URI est une IRI)\n\t\"uri-reference\": [\"iri-reference\"], // uri-reference ⊆ iri-reference\n};\n\n// ─── Format validators (internal) ───────────────────────────────────────────\n\n/**\n * Map interne des fonctions de validation par format.\n *\n * Chaque entrée associe un format Draft-07 à une fonction qui prend\n * une valeur `string` et retourne `boolean`.\n *\n * Utilise les fonctions standalone de `class-validator` quand disponibles,\n * sinon des regex ou des heuristiques.\n */\nconst FORMAT_VALIDATORS: Record<string, (value: string) => boolean> = {\n\t/** ISO 8601 date-time (ex: 
\"2023-01-15T10:30:00Z\") */\n\t\"date-time\": (value: string): boolean => {\n\t\treturn isISO8601(value, { strict: true });\n\t},\n\n\t/** Date complète (ex: \"2023-01-15\") */\n\tdate: (value: string): boolean => {\n\t\tif (!DATE_REGEX.test(value)) return false;\n\t\t// Vérifier que la date est valide (pas de 2023-02-30)\n\t\tconst d = new Date(`${value}T00:00:00Z`);\n\t\treturn !Number.isNaN(d.getTime()) && value === d.toISOString().slice(0, 10);\n\t},\n\n\t/** Heure complète (ex: \"10:30:00\") */\n\ttime: (value: string): boolean => {\n\t\treturn TIME_REGEX.test(value);\n\t},\n\n\t/** Adresse email (RFC 5321) */\n\temail: (value: string): boolean => {\n\t\treturn isEmail(value);\n\t},\n\n\t/** Adresse email internationalisée (approximation via isEmail) */\n\t\"idn-email\": (value: string): boolean => {\n\t\treturn isEmail(value);\n\t},\n\n\t/** Nom d'hôte (RFC 1123) */\n\thostname: (value: string): boolean => {\n\t\treturn isFQDN(value, { require_tld: false });\n\t},\n\n\t/** Nom d'hôte internationalisé (approximation via isFQDN) */\n\t\"idn-hostname\": (value: string): boolean => {\n\t\treturn isFQDN(value, { require_tld: false });\n\t},\n\n\t/** Adresse IPv4 (ex: \"192.168.1.1\") */\n\tipv4: (value: string): boolean => {\n\t\treturn isIP(value, 4);\n\t},\n\n\t/** Adresse IPv6 (ex: \"::1\") */\n\tipv6: (value: string): boolean => {\n\t\treturn isIP(value, 6);\n\t},\n\n\t/** URI absolue (RFC 3986) */\n\turi: (value: string): boolean => {\n\t\treturn isURL(value, { require_protocol: true });\n\t},\n\n\t/** Référence URI (absolue ou relative — approximation via isURL) */\n\t\"uri-reference\": (value: string): boolean => {\n\t\t// Une uri-reference peut être relative, isURL est une approximation\n\t\treturn isURL(value, { require_protocol: false });\n\t},\n\n\t/** IRI (RFC 3987 — approximation via isURL) */\n\tiri: (value: string): boolean => {\n\t\treturn isURL(value, { require_protocol: true });\n\t},\n\n\t/** Référence IRI (approximation via isURL) 
*/\n\t\"iri-reference\": (value: string): boolean => {\n\t\treturn isURL(value, { require_protocol: false });\n\t},\n\n\t/** Template URI (RFC 6570 — vérification basique) */\n\t\"uri-template\": (value: string): boolean => {\n\t\t// Un uri-template valide peut contenir des expressions entre accolades\n\t\t// ou être une URI simple sans template expressions.\n\t\t// On vérifie juste que les accolades sont bien formées.\n\t\tlet inBrace = false;\n\t\tfor (const ch of value) {\n\t\t\tif (ch === \"{\") {\n\t\t\t\tif (inBrace) return false; // Accolades imbriquées\n\t\t\t\tinBrace = true;\n\t\t\t} else if (ch === \"}\") {\n\t\t\t\tif (!inBrace) return false; // Accolade fermante sans ouvrante\n\t\t\t\tinBrace = false;\n\t\t\t}\n\t\t}\n\t\treturn !inBrace; // Pas d'accolade non fermée\n\t},\n\n\t/** UUID (RFC 4122) */\n\tuuid: (value: string): boolean => {\n\t\treturn isUUID(value);\n\t},\n\n\t/** JSON Pointer (RFC 6901) */\n\t\"json-pointer\": (value: string): boolean => {\n\t\t// Chaîne vide est un json-pointer valide (pointe vers la racine)\n\t\tif (value === \"\") return true;\n\t\treturn JSON_POINTER_REGEX.test(value);\n\t},\n\n\t/** Relative JSON Pointer (extension Draft-07) */\n\t\"relative-json-pointer\": (value: string): boolean => {\n\t\treturn RELATIVE_JSON_POINTER_REGEX.test(value);\n\t},\n\n\t/** Expression régulière ECMA-262 */\n\tregex: (value: string): boolean => {\n\t\ttry {\n\t\t\tnew RegExp(value);\n\t\t\treturn true;\n\t\t} catch {\n\t\t\treturn false;\n\t\t}\n\t},\n};\n\n// ─── Public API ──────────────────────────────────────────────────────────────\n\n/**\n * Vérifie si le format est connu/supporté.\n *\n * @param format Le nom du format à vérifier\n * @returns `true` si le format est dans la liste des formats reconnus\n */\nexport function isKnownFormat(format: string): boolean {\n\treturn KNOWN_FORMATS.has(format);\n}\n\n/**\n * Valide une valeur contre un format JSON Schema Draft-07.\n *\n * Retourne `true` si la valeur est valide pour le 
format,\n * `false` si elle ne l'est pas, `null` si le format est inconnu.\n *\n * Ne valide que les strings — pour les non-strings, retourne `true`\n * (le format ne s'applique qu'aux strings en Draft-07).\n *\n * @param value La valeur à valider\n * @param format Le format JSON Schema Draft-07 à vérifier\n * @returns `true` si valide, `false` si invalide, `null` si format inconnu\n *\n * @example\n * ```ts\n * validateFormat(\"test@example.com\", \"email\"); // true\n * validateFormat(\"not-an-email\", \"email\"); // false\n * validateFormat(42, \"email\"); // true (non-string → skip)\n * validateFormat(\"foo\", \"unknown-format\"); // null (format inconnu)\n * ```\n */\nexport function validateFormat(value: unknown, format: string): boolean | null {\n\t// Le format ne s'applique qu'aux strings en Draft-07\n\tif (typeof value !== \"string\") return true;\n\n\tconst validator = FORMAT_VALIDATORS[format];\n\tif (!validator) return null; // Format inconnu → indéterminé\n\n\treturn validator(value);\n}\n\n/**\n * Vérifie si le format `sub` est un sous-ensemble du format `sup`.\n *\n * ⚠️ Cette fonction compare UNIQUEMENT deux formats entre eux.\n * Elle ne gère PAS la relation format ⊆ type (ex: email ⊆ string),\n * qui est déjà correctement gérée par le merge engine.\n *\n * `sub ⊆ sup` signifie : toute valeur valide pour `sub` est aussi valide pour `sup`.\n *\n * Retourne `true` si `sub ⊆ sup`, `false` si incompatible, `null` si indéterminé.\n *\n * Cas gérés :\n * - Identité : `sub === sup` → `true`\n * - Hiérarchie : `sup` est dans `FORMAT_SUPERSETS[sub]` → `true`\n * - Hiérarchie inverse : `sub` est dans `FORMAT_SUPERSETS[sup]` → `null`\n * (le subset est un sur-ensemble du superset → indéterminé, pas un conflit\n * car certaines valeurs valides pour sub pourraient aussi être valides pour sup)\n * - Formats différents sans relation connue → `null` (indéterminé)\n *\n * @param subFormat Le format du schema sub\n * @param supFormat Le format du schema sup\n * 
@returns `true` si sub ⊆ sup, `null` si indéterminé\n *\n * @example\n * ```ts\n * isFormatSubset(\"email\", \"email\"); // true (identité)\n * isFormatSubset(\"email\", \"idn-email\"); // true (email ⊆ idn-email)\n * isFormatSubset(\"email\", \"ipv4\"); // null (incomparable)\n * isFormatSubset(\"idn-email\", \"email\"); // null (sur-ensemble, pas sous-ensemble)\n * ```\n */\nexport function isFormatSubset(\n\tsubFormat: string,\n\tsupFormat: string,\n): boolean | null {\n\t// Identité : même format → toujours un sous-ensemble\n\tif (subFormat === supFormat) return true;\n\n\t// Hiérarchie : vérifier si sup est un sur-ensemble connu de sub\n\tconst supersets = FORMAT_SUPERSETS[subFormat];\n\tif (supersets?.includes(supFormat)) {\n\t\treturn true;\n\t}\n\n\t// Formats différents sans relation connue → indéterminé\n\t// On ne retourne PAS false ici car on ne peut pas affirmer l'incompatibilité\n\t// entre deux formats quelconques sans les connaître parfaitement.\n\t// Le merge engine (via hasFormatConflict) se charge de détecter les conflits\n\t// quand les deux schemas ont un format et qu'aucune relation n'est connue.\n\treturn 
null;\n}\n"],"names":["FORMAT_SUPERSETS","KNOWN_FORMATS","isFormatSubset","isKnownFormat","validateFormat","TIME_REGEX","DATE_REGEX","JSON_POINTER_REGEX","RELATIVE_JSON_POINTER_REGEX","_URI_TEMPLATE_REGEX","Set","email","hostname","uri","FORMAT_VALIDATORS","value","isISO8601","strict","date","test","d","Date","Number","isNaN","getTime","toISOString","slice","time","isEmail","isFQDN","require_tld","ipv4","isIP","ipv6","isURL","require_protocol","iri","inBrace","ch","uuid","isUUID","regex","RegExp","format","has","validator","subFormat","supFormat","supersets","includes"],"mappings":"mPAsFaA,0BAAAA,sBArCAC,uBAAAA,mBAqPGC,wBAAAA,oBAlEAC,uBAAAA,mBAyBAC,wBAAAA,gDAtPT,mBAyBP,MAAMC,WAAa,kDAGnB,MAAMC,WAAa,sBAGnB,MAAMC,mBAAqB,wBAG3B,MAAMC,4BAA8B,+BAGpC,MAAMC,oBAAsB,YAKrB,MAAMR,cAAqC,IAAIS,IAAI,CACzD,YACA,OACA,OACA,QACA,YACA,WACA,eACA,OACA,OACA,MACA,gBACA,MACA,gBACA,eACA,OACA,eACA,wBACA,QACA,EAkBM,MAAMV,iBAA6C,CACzDW,MAAO,CAAC,YAAY,CACpBC,SAAU,CAAC,eAAe,CAC1BC,IAAK,CAAC,MAAM,CACZ,gBAAiB,CAAC,gBAAgB,AACnC,EAaA,MAAMC,kBAAgE,CAErE,YAAa,AAACC,QACb,MAAOC,GAAAA,yBAAS,EAACD,MAAO,CAAEE,OAAQ,IAAK,EACxC,EAGAC,KAAM,AAACH,QACN,GAAI,CAACT,WAAWa,IAAI,CAACJ,OAAQ,OAAO,MAEpC,MAAMK,EAAI,IAAIC,KAAK,CAAC,EAAEN,MAAM,UAAU,CAAC,EACvC,MAAO,CAACO,OAAOC,KAAK,CAACH,EAAEI,OAAO,KAAOT,QAAUK,EAAEK,WAAW,GAAGC,KAAK,CAAC,EAAG,GACzE,EAGAC,KAAM,AAACZ,QACN,OAAOV,WAAWc,IAAI,CAACJ,MACxB,EAGAJ,MAAO,AAACI,QACP,MAAOa,GAAAA,uBAAO,EAACb,MAChB,EAGA,YAAa,AAACA,QACb,MAAOa,GAAAA,uBAAO,EAACb,MAChB,EAGAH,SAAU,AAACG,QACV,MAAOc,GAAAA,sBAAM,EAACd,MAAO,CAAEe,YAAa,KAAM,EAC3C,EAGA,eAAgB,AAACf,QAChB,MAAOc,GAAAA,sBAAM,EAACd,MAAO,CAAEe,YAAa,KAAM,EAC3C,EAGAC,KAAM,AAAChB,QACN,MAAOiB,GAAAA,oBAAI,EAACjB,MAAO,EACpB,EAGAkB,KAAM,AAAClB,QACN,MAAOiB,GAAAA,oBAAI,EAACjB,MAAO,EACpB,EAGAF,IAAK,AAACE,QACL,MAAOmB,GAAAA,qBAAK,EAACnB,MAAO,CAAEoB,iBAAkB,IAAK,EAC9C,EAGA,gBAAiB,AAACpB,QAEjB,MAAOmB,GAAAA,qBAAK,EAACnB,MAAO,CAAEoB,iBAAkB,KAAM,EAC/C,EAGAC,IAAK,AAACrB,QACL,MAAOmB,GAAAA,qBAAK,EAACnB,MAAO,CAAEoB,iBAAkB,IAAK,EAC9C,EAGA,gBAAiB,AAACpB,QACjB,MAAOm
B,GAAAA,qBAAK,EAACnB,MAAO,CAAEoB,iBAAkB,KAAM,EAC/C,EAGA,eAAgB,AAACpB,QAIhB,IAAIsB,QAAU,MACd,IAAK,MAAMC,MAAMvB,MAAO,CACvB,GAAIuB,KAAO,IAAK,CACf,GAAID,QAAS,OAAO,MACpBA,QAAU,IACX,MAAO,GAAIC,KAAO,IAAK,CACtB,GAAI,CAACD,QAAS,OAAO,MACrBA,QAAU,KACX,CACD,CACA,MAAO,CAACA,OACT,EAGAE,KAAM,AAACxB,QACN,MAAOyB,GAAAA,sBAAM,EAACzB,MACf,EAGA,eAAgB,AAACA,QAEhB,GAAIA,QAAU,GAAI,OAAO,KACzB,OAAOR,mBAAmBY,IAAI,CAACJ,MAChC,EAGA,wBAAyB,AAACA,QACzB,OAAOP,4BAA4BW,IAAI,CAACJ,MACzC,EAGA0B,MAAO,AAAC1B,QACP,GAAI,CACH,IAAI2B,OAAO3B,OACX,OAAO,IACR,CAAE,KAAM,CACP,OAAO,KACR,CACD,CACD,EAUO,SAASZ,cAAcwC,MAAc,EAC3C,OAAO1C,cAAc2C,GAAG,CAACD,OAC1B,CAuBO,SAASvC,eAAeW,KAAc,CAAE4B,MAAc,EAE5D,GAAI,OAAO5B,QAAU,SAAU,OAAO,KAEtC,MAAM8B,UAAY/B,iBAAiB,CAAC6B,OAAO,CAC3C,GAAI,CAACE,UAAW,OAAO,KAEvB,OAAOA,UAAU9B,MAClB,CAiCO,SAASb,eACf4C,SAAiB,CACjBC,SAAiB,EAGjB,GAAID,YAAcC,UAAW,OAAO,KAGpC,MAAMC,UAAYhD,gBAAgB,CAAC8C,UAAU,CAC7C,GAAIE,WAAWC,SAASF,WAAY,CACnC,OAAO,IACR,CAOA,OAAO,IACR"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports,"__esModule",{value:true});Object.defineProperty(exports,"formatResult",{enumerable:true,get:function(){return formatResult}});function formatErrorLine(e){return` ✗ ${e.key}: expected ${e.expected}, received ${e.received}`}function formatResult(label,result){const icon=result.isSubset?"✅":"❌";const lines=[`${icon} ${label}: ${result.isSubset}`];if(!result.isSubset&&result.errors.length>0){lines.push(" Errors:");for(const e of result.errors){lines.push(` ${formatErrorLine(e)}`)}}return lines.join("\n")}
|
|
2
|
+
//# sourceMappingURL=formatter.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/formatter.ts"],"sourcesContent":["import type { SubsetResult } from \"./types\";\n\n// ─── Result Formatter ────────────────────────────────────────────────────────\n//\n// Formate un `SubsetResult` en chaîne lisible pour logs / debug.\n\n/**\n * Formate une erreur sémantique en ligne lisible.\n *\n * @param e L'erreur à formater\n * @returns Ligne formatée avec le chemin, expected et received\n */\nfunction formatErrorLine(e: {\n\tkey: string;\n\texpected: string;\n\treceived: string;\n}): string {\n\treturn ` ✗ ${e.key}: expected ${e.expected}, received ${e.received}`;\n}\n\n/**\n * Formate un SubsetResult en chaîne lisible (utile pour logs/debug).\n *\n * @param label Label descriptif du check (ex: \"strict ⊆ loose\")\n * @param result Le résultat du check à formater\n * @returns Chaîne multi-lignes formatée avec icônes et erreurs\n *\n * @example\n * ```\n * ✅ strict ⊆ loose: true\n * ```\n *\n * @example\n * ```\n * ❌ loose ⊆ strict: false\n * Errors:\n * ✗ accountId: expected string, received undefined\n * ✗ meetingId: expected not optional, received optional\n * ```\n */\nexport function formatResult(label: string, result: SubsetResult): string {\n\tconst icon = result.isSubset ? 
\"✅\" : \"❌\";\n\tconst lines: string[] = [`${icon} ${label}: ${result.isSubset}`];\n\n\tif (!result.isSubset && result.errors.length > 0) {\n\t\tlines.push(\" Errors:\");\n\n\t\tfor (const e of result.errors) {\n\t\t\tlines.push(` ${formatErrorLine(e)}`);\n\t\t}\n\t}\n\n\treturn lines.join(\"\\n\");\n}\n"],"names":["formatResult","formatErrorLine","e","key","expected","received","label","result","icon","isSubset","lines","errors","length","push","join"],"mappings":"oGAwCgBA,sDAAAA,gBA5BhB,SAASC,gBAAgBC,CAIxB,EACA,MAAO,CAAC,IAAI,EAAEA,EAAEC,GAAG,CAAC,WAAW,EAAED,EAAEE,QAAQ,CAAC,WAAW,EAAEF,EAAEG,QAAQ,CAAC,CAAC,AACtE,CAsBO,SAASL,aAAaM,KAAa,CAAEC,MAAoB,EAC/D,MAAMC,KAAOD,OAAOE,QAAQ,CAAG,IAAM,IACrC,MAAMC,MAAkB,CAAC,CAAC,EAAEF,KAAK,CAAC,EAAEF,MAAM,EAAE,EAAEC,OAAOE,QAAQ,CAAC,CAAC,CAAC,CAEhE,GAAI,CAACF,OAAOE,QAAQ,EAAIF,OAAOI,MAAM,CAACC,MAAM,CAAG,EAAG,CACjDF,MAAMG,IAAI,CAAC,cAEX,IAAK,MAAMX,KAAKK,OAAOI,MAAM,CAAE,CAC9BD,MAAMG,IAAI,CAAC,CAAC,KAAK,EAAEZ,gBAAgBC,GAAG,CAAC,CACxC,CACD,CAEA,OAAOQ,MAAMI,IAAI,CAAC,KACnB"}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export { JsonSchemaCompatibilityChecker } from "./json-schema-compatibility-checker.js";
|
|
2
|
+
export { MergeEngine } from "./merge-engine.js";
|
|
3
|
+
export { arePatternsEquivalent, isPatternSubset, isTrivialPattern, } from "./pattern-subset.js";
|
|
4
|
+
export { formatSchemaType } from "./semantic-errors.js";
|
|
5
|
+
export type { ConnectionResult, ResolvedConditionResult, SchemaError, SubsetResult, } from "./types.js";
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports,"__esModule",{value:true});function _export(target,all){for(var name in all)Object.defineProperty(target,name,{enumerable:true,get:Object.getOwnPropertyDescriptor(all,name).get})}_export(exports,{get JsonSchemaCompatibilityChecker(){return _jsonschemacompatibilitychecker.JsonSchemaCompatibilityChecker},get MergeEngine(){return _mergeengine.MergeEngine},get arePatternsEquivalent(){return _patternsubset.arePatternsEquivalent},get formatSchemaType(){return _semanticerrors.formatSchemaType},get isPatternSubset(){return _patternsubset.isPatternSubset},get isTrivialPattern(){return _patternsubset.isTrivialPattern}});const _jsonschemacompatibilitychecker=require("./json-schema-compatibility-checker.js");const _mergeengine=require("./merge-engine.js");const _patternsubset=require("./pattern-subset.js");const _semanticerrors=require("./semantic-errors.js");
|
|
2
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/index.ts"],"sourcesContent":["export { JsonSchemaCompatibilityChecker } from \"./json-schema-compatibility-checker\";\nexport { MergeEngine } from \"./merge-engine\";\nexport {\n\tarePatternsEquivalent,\n\tisPatternSubset,\n\tisTrivialPattern,\n} from \"./pattern-subset\";\nexport { formatSchemaType } from \"./semantic-errors\";\nexport type {\n\tConnectionResult,\n\tResolvedConditionResult,\n\tSchemaError,\n\tSubsetResult,\n} from \"./types\";\n"],"names":["JsonSchemaCompatibilityChecker","MergeEngine","arePatternsEquivalent","formatSchemaType","isPatternSubset","isTrivialPattern"],"mappings":"mPAASA,wCAAAA,8DAA8B,MAC9BC,qBAAAA,wBAAW,MAEnBC,+BAAAA,oCAAqB,MAIbC,0BAAAA,gCAAgB,MAHxBC,yBAAAA,8BAAe,MACfC,0BAAAA,+BAAgB,kDAL8B,kEACnB,+CAKrB,kDAC0B"}
|
package/dist/{json-schema-compatibility-checker.d.ts → cjs/json-schema-compatibility-checker.d.ts}
RENAMED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
import type { JSONSchema7, JSONSchema7Definition } from "json-schema";
|
|
2
|
-
import { resolveConditions } from "./condition-resolver";
|
|
3
|
-
import { formatResult } from "./formatter";
|
|
4
|
-
import { MergeEngine } from "./merge-engine";
|
|
5
|
-
import { normalize } from "./normalizer";
|
|
6
|
-
import { arePatternsEquivalent, isPatternSubset, isTrivialPattern } from "./pattern-subset";
|
|
7
|
-
import type { BranchResult, BranchType } from "./subset-checker";
|
|
8
|
-
import type { ConnectionResult, ResolvedConditionResult, SchemaError, SubsetResult } from "./types";
|
|
2
|
+
import { resolveConditions } from "./condition-resolver.js";
|
|
3
|
+
import { formatResult } from "./formatter.js";
|
|
4
|
+
import { MergeEngine } from "./merge-engine.js";
|
|
5
|
+
import { normalize } from "./normalizer.js";
|
|
6
|
+
import { arePatternsEquivalent, isPatternSubset, isTrivialPattern } from "./pattern-subset.js";
|
|
7
|
+
import type { BranchResult, BranchType } from "./subset-checker.js";
|
|
8
|
+
import type { ConnectionResult, ResolvedConditionResult, SchemaError, SubsetResult } from "./types.js";
|
|
9
9
|
export type { SchemaError, SubsetResult, ConnectionResult, ResolvedConditionResult, BranchType, BranchResult, };
|
|
10
10
|
export { normalize, resolveConditions, formatResult, MergeEngine, isPatternSubset, arePatternsEquivalent, isTrivialPattern, };
|
|
11
11
|
export declare class JsonSchemaCompatibilityChecker {
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports,"__esModule",{value:true});function _export(target,all){for(var name in all)Object.defineProperty(target,name,{enumerable:true,get:Object.getOwnPropertyDescriptor(all,name).get})}_export(exports,{get JsonSchemaCompatibilityChecker(){return JsonSchemaCompatibilityChecker},get MergeEngine(){return _mergeengine.MergeEngine},get arePatternsEquivalent(){return _patternsubset.arePatternsEquivalent},get formatResult(){return _formatter.formatResult},get isPatternSubset(){return _patternsubset.isPatternSubset},get isTrivialPattern(){return _patternsubset.isTrivialPattern},get normalize(){return _normalizer.normalize},get resolveConditions(){return _conditionresolver.resolveConditions}});const _conditionresolver=require("./condition-resolver.js");const _formatter=require("./formatter.js");const _mergeengine=require("./merge-engine.js");const _normalizer=require("./normalizer.js");const _patternsubset=require("./pattern-subset.js");const _subsetchecker=require("./subset-checker.js");const _utils=require("./utils.js");function _define_property(obj,key,value){if(key in obj){Object.defineProperty(obj,key,{value:value,enumerable:true,configurable:true,writable:true})}else{obj[key]=value}return obj}class JsonSchemaCompatibilityChecker{isSubset(sub,sup){if(sub===sup)return true;if((0,_utils.deepEqual)(sub,sup))return true;const nSub=(0,_normalizer.normalize)(sub);const nSup=(0,_normalizer.normalize)(sup);if(nSub!==sub&&nSup!==sup&&(0,_utils.deepEqual)(nSub,nSup))return true;if(nSub!==nSup&&(0,_utils.deepEqual)(nSub,nSup))return true;const{branches:subBranches}=(0,_subsetchecker.getBranchesTyped)(nSub);if(subBranches.length>1||subBranches[0]!==nSub){return 
subBranches.every(branch=>(0,_subsetchecker.isAtomicSubsetOf)(branch,nSup,this.engine))}return(0,_subsetchecker.isAtomicSubsetOf)(nSub,nSup,this.engine)}check(sub,sup){if(sub===sup){return{isSubset:true,merged:sub,errors:[]}}if((0,_utils.deepEqual)(sub,sup)){return{isSubset:true,merged:sub,errors:[]}}const nSub=(0,_normalizer.normalize)(sub);const nSup=(0,_normalizer.normalize)(sup);if((0,_utils.deepEqual)(nSub,nSup)){return{isSubset:true,merged:nSub,errors:[]}}const{branches:subBranches,type:subBranchType}=(0,_subsetchecker.getBranchesTyped)(nSub);const{branches:supBranches,type:supBranchType}=(0,_subsetchecker.getBranchesTyped)(nSup);if(subBranches.length>1||subBranches[0]!==nSub){return(0,_subsetchecker.checkBranchedSub)(subBranches,nSup,this.engine,subBranchType)}if(supBranches.length>1||supBranches[0]!==nSup){return(0,_subsetchecker.checkBranchedSup)(nSub,supBranches,this.engine,supBranchType)}return(0,_subsetchecker.checkAtomic)(nSub,nSup,this.engine)}canConnect(sourceOutput,targetInput){const result=this.check(sourceOutput,targetInput);return{...result,direction:"sourceOutput ⊆ targetInput"}}isEqual(a,b){return this.engine.isEqual((0,_normalizer.normalize)(a),(0,_normalizer.normalize)(b))}intersect(a,b){if(a===b||(0,_utils.deepEqual)(a,b))return(0,_normalizer.normalize)(a);const nA=(0,_normalizer.normalize)(a);const nB=(0,_normalizer.normalize)(b);if((0,_utils.deepEqual)(nA,nB))return nA;const merged=this.engine.merge(nA,nB);if(merged===null)return null;if((0,_utils.deepEqual)(merged,nA)||(0,_utils.deepEqual)(merged,nB))return merged;return(0,_normalizer.normalize)(merged)}resolveConditions(schema,data){return(0,_conditionresolver.resolveConditions)(schema,data,this.engine)}checkResolved(sub,sup,subData,supData){const resolvedSub=(0,_conditionresolver.resolveConditions)(sub,subData,this.engine);const resolvedSup=(0,_conditionresolver.resolveConditions)(sup,supData??subData,this.engine);const 
result=this.check(resolvedSub.resolved,resolvedSup.resolved);return{...result,resolvedSub,resolvedSup}}normalize(def){return(0,_normalizer.normalize)(def)}formatResult(label,result){return(0,_formatter.formatResult)(label,result)}constructor(){_define_property(this,"engine",void 0);this.engine=new _mergeengine.MergeEngine}}
|
|
2
|
+
//# sourceMappingURL=json-schema-compatibility-checker.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/json-schema-compatibility-checker.ts"],"sourcesContent":["import type { JSONSchema7, JSONSchema7Definition } from \"json-schema\";\nimport { resolveConditions } from \"./condition-resolver\";\nimport { formatResult } from \"./formatter\";\nimport { MergeEngine } from \"./merge-engine\";\nimport { normalize } from \"./normalizer\";\nimport {\n\tarePatternsEquivalent,\n\tisPatternSubset,\n\tisTrivialPattern,\n} from \"./pattern-subset\";\nimport type { BranchResult, BranchType } from \"./subset-checker\";\nimport {\n\tcheckAtomic,\n\tcheckBranchedSub,\n\tcheckBranchedSup,\n\tgetBranchesTyped,\n\tisAtomicSubsetOf,\n} from \"./subset-checker\";\nimport type {\n\tConnectionResult,\n\tResolvedConditionResult,\n\tSchemaError,\n\tSubsetResult,\n} from \"./types\";\nimport { deepEqual } from \"./utils\";\n\n// ─── Re-exports ──────────────────────────────────────────────────────────────\n\nexport type {\n\tSchemaError,\n\tSubsetResult,\n\tConnectionResult,\n\tResolvedConditionResult,\n\tBranchType,\n\tBranchResult,\n};\n\nexport {\n\tnormalize,\n\tresolveConditions,\n\tformatResult,\n\tMergeEngine,\n\tisPatternSubset,\n\tarePatternsEquivalent,\n\tisTrivialPattern,\n};\n\n// ─── Main Class ──────────────────────────────────────────────────────────────\n//\n// Façade légère qui orchestre les sous-modules pour vérifier la compatibilité\n// entre JSON Schemas (Draft-07).\n//\n// Principe mathématique :\n// A ⊆ B ⟺ A ∩ B ≡ A\n//\n// En JSON Schema :\n// - A ∩ B = allOf([A, B]) résolu via merge\n// - ≡ = comparaison structurelle\n//\n// @example\n// ```ts\n// const checker = new JsonSchemaCompatibilityChecker();\n//\n// checker.isSubset(strict, loose); // true\n// checker.check(loose, strict); // { isSubset: false, diffs: [...] 
}\n// checker.canConnect(nodeA.output, nodeB.input); // ConnectionResult\n// ```\n\nexport class JsonSchemaCompatibilityChecker {\n\tprivate readonly engine: MergeEngine;\n\n\tconstructor() {\n\t\tthis.engine = new MergeEngine();\n\t}\n\n\t// ── Subset check (boolean) ─────────────────────────────────────────────\n\n\t/**\n\t * Vérifie si `sub ⊆ sup`.\n\t * Toute valeur valide pour sub est-elle aussi valide pour sup ?\n\t *\n\t * Point 6 — Utilise `getBranchesTyped` pour distinguer `anyOf` de `oneOf`\n\t * en interne, bien que le résultat boolean ne reflète pas la distinction.\n\t */\n\tisSubset(sub: JSONSchema7Definition, sup: JSONSchema7Definition): boolean {\n\t\t// ── Identity short-circuit ──\n\t\t// If sub and sup are the same reference, sub ⊆ sup is trivially true.\n\t\t// This avoids the entire normalize + merge + compare pipeline.\n\t\tif (sub === sup) return true;\n\n\t\t// ── Pre-normalize structural equality ──\n\t\t// If sub and sup are structurally identical before normalization,\n\t\t// they represent the same schema → sub ⊆ sup trivially.\n\t\t// This avoids the WeakMap overhead of normalize() for common cases\n\t\t// like {} ⊆ {} or identical schema objects with different references.\n\t\tif (deepEqual(sub, sup)) return true;\n\n\t\tconst nSub = normalize(sub);\n\t\tconst nSup = normalize(sup);\n\n\t\t// ── Post-normalize structural identity ──\n\t\t// After normalization, schemas that were syntactically different\n\t\t// but semantically equivalent become structurally equal\n\t\t// (e.g. 
{const:1} vs {const:1, type:\"integer\"}).\n\t\tif (nSub !== sub && nSup !== sup && deepEqual(nSub, nSup)) return true;\n\t\tif (nSub !== nSup && deepEqual(nSub, nSup)) return true;\n\n\t\tconst { branches: subBranches } = getBranchesTyped(nSub);\n\n\t\tif (subBranches.length > 1 || subBranches[0] !== nSub) {\n\t\t\treturn subBranches.every((branch) =>\n\t\t\t\tisAtomicSubsetOf(branch, nSup, this.engine),\n\t\t\t);\n\t\t}\n\n\t\treturn isAtomicSubsetOf(nSub, nSup, this.engine);\n\t}\n\n\t// ── Subset check (detailed) ────────────────────────────────────────────\n\n\t/**\n\t * Vérifie `sub ⊆ sup` et retourne un diagnostic complet\n\t * avec des erreurs sémantiques lisibles.\n\t *\n\t * Point 6 — Utilise `getBranchesTyped` pour distinguer `anyOf` de `oneOf`\n\t * dans les paths d'erreur.\n\t */\n\tcheck(sub: JSONSchema7Definition, sup: JSONSchema7Definition): SubsetResult {\n\t\t// ── Identity short-circuit ──\n\t\t// Same reference → no errors, no merge needed.\n\t\tif (sub === sup) {\n\t\t\treturn { isSubset: true, merged: sub, errors: [] };\n\t\t}\n\n\t\t// ── Pre-normalize structural equality ──\n\t\t// Avoids WeakMap overhead for identical schemas ({} ⊆ {}, etc.).\n\t\tif (deepEqual(sub, sup)) {\n\t\t\treturn { isSubset: true, merged: sub, errors: [] };\n\t\t}\n\n\t\tconst nSub = normalize(sub);\n\t\tconst nSup = normalize(sup);\n\n\t\t// ── Post-normalize structural identity ──\n\t\t// Catches semantically equivalent schemas after normalization.\n\t\tif (deepEqual(nSub, nSup)) {\n\t\t\treturn { isSubset: true, merged: nSub, errors: [] };\n\t\t}\n\n\t\tconst { branches: subBranches, type: subBranchType } =\n\t\t\tgetBranchesTyped(nSub);\n\t\tconst { branches: supBranches, type: supBranchType } =\n\t\t\tgetBranchesTyped(nSup);\n\n\t\t// anyOf/oneOf dans sub\n\t\tif (subBranches.length > 1 || subBranches[0] !== nSub) {\n\t\t\treturn checkBranchedSub(subBranches, nSup, this.engine, subBranchType);\n\t\t}\n\n\t\t// anyOf/oneOf dans sup uniquement\n\t\tif 
(supBranches.length > 1 || supBranches[0] !== nSup) {\n\t\t\treturn checkBranchedSup(nSub, supBranches, this.engine, supBranchType);\n\t\t}\n\n\t\t// Cas standard\n\t\treturn checkAtomic(nSub, nSup, this.engine);\n\t}\n\n\t// ── Connection check ───────────────────────────────────────────────────\n\n\t/**\n\t * Vérifie si la sortie d'un nœud source peut alimenter l'entrée d'un nœud cible.\n\t *\n\t * Sémantique : `sourceOutput ⊆ targetInput`\n\t * → Toute donnée produite par source sera acceptée par target.\n\t */\n\tcanConnect(\n\t\tsourceOutput: JSONSchema7Definition,\n\t\ttargetInput: JSONSchema7Definition,\n\t): ConnectionResult {\n\t\tconst result = this.check(sourceOutput, targetInput);\n\t\treturn { ...result, direction: \"sourceOutput ⊆ targetInput\" };\n\t}\n\n\t// ── Equality ───────────────────────────────────────────────────────────\n\n\t/**\n\t * Vérifie l'égalité structurelle entre deux schemas.\n\t */\n\tisEqual(a: JSONSchema7Definition, b: JSONSchema7Definition): boolean {\n\t\treturn this.engine.isEqual(normalize(a), normalize(b));\n\t}\n\n\t// ── Intersection ───────────────────────────────────────────────────────\n\n\t/**\n\t * Calcule l'intersection de deux schemas (allOf merge).\n\t * Retourne null si les schemas sont incompatibles.\n\t *\n\t * Le résultat est normalisé pour éliminer les artefacts structurels\n\t * du merge (ex: `enum` redondant quand `const` est présent).\n\t */\n\tintersect(\n\t\ta: JSONSchema7Definition,\n\t\tb: JSONSchema7Definition,\n\t): JSONSchema7Definition | null {\n\t\t// ── Identity short-circuit ──\n\t\t// If a and b are the same reference or structurally equal,\n\t\t// intersection is just normalize(a) — skip the merge entirely.\n\t\tif (a === b || deepEqual(a, b)) return normalize(a);\n\n\t\tconst nA = normalize(a);\n\t\tconst nB = normalize(b);\n\n\t\t// ── Post-normalize identity ──\n\t\tif (deepEqual(nA, nB)) return nA;\n\n\t\tconst merged = this.engine.merge(nA, nB);\n\t\tif (merged === null) return 
null;\n\t\t// Fast path: if merge result equals one of the normalized inputs,\n\t\t// it's already normalized — skip redundant normalize call.\n\t\tif (deepEqual(merged, nA) || deepEqual(merged, nB)) return merged;\n\t\treturn normalize(merged);\n\t}\n\n\t// ── Condition resolution ───────────────────────────────────────────────\n\n\t/**\n\t * Résout les `if/then/else` d'un schema en évaluant le `if` contre\n\t * des données partielles (discriminants).\n\t */\n\tresolveConditions(\n\t\tschema: JSONSchema7,\n\t\tdata: Record<string, unknown>,\n\t): ResolvedConditionResult {\n\t\treturn resolveConditions(schema, data, this.engine);\n\t}\n\n\t// ── Resolved check ────────────────────────────────────────────────────\n\n\t/**\n\t * Raccourci : résout les conditions des deux schemas puis vérifie sub ⊆ sup.\n\t *\n\t * Utile quand le superset contient des if/then/else et que tu connais\n\t * les valeurs discriminantes que le subset va produire.\n\t */\n\tcheckResolved(\n\t\tsub: JSONSchema7,\n\t\tsup: JSONSchema7,\n\t\tsubData: Record<string, unknown>,\n\t\tsupData?: Record<string, unknown>,\n\t): SubsetResult & {\n\t\tresolvedSub: ResolvedConditionResult;\n\t\tresolvedSup: ResolvedConditionResult;\n\t} {\n\t\tconst resolvedSub = resolveConditions(sub, subData, this.engine);\n\t\tconst resolvedSup = resolveConditions(sup, supData ?? 
subData, this.engine);\n\t\tconst result = this.check(resolvedSub.resolved, resolvedSup.resolved);\n\n\t\treturn { ...result, resolvedSub, resolvedSup };\n\t}\n\n\t// ── Normalization ──────────────────────────────────────────────────────\n\n\t/**\n\t * Normalise un schema : infère `type` depuis `const`/`enum`,\n\t * et normalise récursivement tous les sous-schemas.\n\t */\n\tnormalize(def: JSONSchema7Definition): JSONSchema7Definition {\n\t\treturn normalize(def);\n\t}\n\n\t// ── Formatting ─────────────────────────────────────────────────────────\n\n\t/**\n\t * Formate un SubsetResult en chaîne lisible (utile pour logs/debug).\n\t */\n\tformatResult(label: string, result: SubsetResult): string {\n\t\treturn formatResult(label, result);\n\t}\n}\n"],"names":["JsonSchemaCompatibilityChecker","MergeEngine","arePatternsEquivalent","formatResult","isPatternSubset","isTrivialPattern","normalize","resolveConditions","isSubset","sub","sup","deepEqual","nSub","nSup","branches","subBranches","getBranchesTyped","length","every","branch","isAtomicSubsetOf","engine","check","merged","errors","type","subBranchType","supBranches","supBranchType","checkBranchedSub","checkBranchedSup","checkAtomic","canConnect","sourceOutput","targetInput","result","direction","isEqual","a","b","intersect","nA","nB","merge","schema","data","checkResolved","subData","supData","resolvedSub","resolvedSup","resolved","def","label"],"mappings":"mPAoEaA,wCAAAA,oCA3BZC,qBAAAA,wBAAW,MAEXC,+BAAAA,oCAAqB,MAHrBC,sBAAAA,uBAAY,MAEZC,yBAAAA,8BAAe,MAEfC,0BAAAA,+BAAgB,MANhBC,mBAAAA,qBAAS,MACTC,2BAAAA,oCAAiB,qCAtCgB,iDACL,0CACD,4CACF,6CAKnB,iDAQA,yCAOmB,+LA4CnB,MAAMP,+BAgBZQ,SAASC,GAA0B,CAAEC,GAA0B,CAAW,CAIzE,GAAID,MAAQC,IAAK,OAAO,KAOxB,GAAIC,GAAAA,gBAAS,EAACF,IAAKC,KAAM,OAAO,KAEhC,MAAME,KAAON,GAAAA,qBAAS,EAACG,KACvB,MAAMI,KAAOP,GAAAA,qBAAS,EAACI,KAMvB,GAAIE,OAASH,KAAOI,OAASH,KAAOC,GAAAA,gBAAS,EAACC,KAAMC,MAAO,OAAO,KAClE,GAAID,OAASC,MAAQF,GAAAA,gBAAS,EAACC,KAAMC,MAAO,OAAO,KAEnD,KAAM,CAAEC,SAAUC,WAAW,CAAE,CAAGC,GAAA
A,+BAAgB,EAACJ,MAEnD,GAAIG,YAAYE,MAAM,CAAG,GAAKF,WAAW,CAAC,EAAE,GAAKH,KAAM,CACtD,OAAOG,YAAYG,KAAK,CAAC,AAACC,QACzBC,GAAAA,+BAAgB,EAACD,OAAQN,KAAM,IAAI,CAACQ,MAAM,EAE5C,CAEA,MAAOD,GAAAA,+BAAgB,EAACR,KAAMC,KAAM,IAAI,CAACQ,MAAM,CAChD,CAWAC,MAAMb,GAA0B,CAAEC,GAA0B,CAAgB,CAG3E,GAAID,MAAQC,IAAK,CAChB,MAAO,CAAEF,SAAU,KAAMe,OAAQd,IAAKe,OAAQ,EAAE,AAAC,CAClD,CAIA,GAAIb,GAAAA,gBAAS,EAACF,IAAKC,KAAM,CACxB,MAAO,CAAEF,SAAU,KAAMe,OAAQd,IAAKe,OAAQ,EAAE,AAAC,CAClD,CAEA,MAAMZ,KAAON,GAAAA,qBAAS,EAACG,KACvB,MAAMI,KAAOP,GAAAA,qBAAS,EAACI,KAIvB,GAAIC,GAAAA,gBAAS,EAACC,KAAMC,MAAO,CAC1B,MAAO,CAAEL,SAAU,KAAMe,OAAQX,KAAMY,OAAQ,EAAE,AAAC,CACnD,CAEA,KAAM,CAAEV,SAAUC,WAAW,CAAEU,KAAMC,aAAa,CAAE,CACnDV,GAAAA,+BAAgB,EAACJ,MAClB,KAAM,CAAEE,SAAUa,WAAW,CAAEF,KAAMG,aAAa,CAAE,CACnDZ,GAAAA,+BAAgB,EAACH,MAGlB,GAAIE,YAAYE,MAAM,CAAG,GAAKF,WAAW,CAAC,EAAE,GAAKH,KAAM,CACtD,MAAOiB,GAAAA,+BAAgB,EAACd,YAAaF,KAAM,IAAI,CAACQ,MAAM,CAAEK,cACzD,CAGA,GAAIC,YAAYV,MAAM,CAAG,GAAKU,WAAW,CAAC,EAAE,GAAKd,KAAM,CACtD,MAAOiB,GAAAA,+BAAgB,EAAClB,KAAMe,YAAa,IAAI,CAACN,MAAM,CAAEO,cACzD,CAGA,MAAOG,GAAAA,0BAAW,EAACnB,KAAMC,KAAM,IAAI,CAACQ,MAAM,CAC3C,CAUAW,WACCC,YAAmC,CACnCC,WAAkC,CACf,CACnB,MAAMC,OAAS,IAAI,CAACb,KAAK,CAACW,aAAcC,aACxC,MAAO,CAAE,GAAGC,MAAM,CAAEC,UAAW,4BAA6B,CAC7D,CAOAC,QAAQC,CAAwB,CAAEC,CAAwB,CAAW,CACpE,OAAO,IAAI,CAAClB,MAAM,CAACgB,OAAO,CAAC/B,GAAAA,qBAAS,EAACgC,GAAIhC,GAAAA,qBAAS,EAACiC,GACpD,CAWAC,UACCF,CAAwB,CACxBC,CAAwB,CACO,CAI/B,GAAID,IAAMC,GAAK5B,GAAAA,gBAAS,EAAC2B,EAAGC,GAAI,MAAOjC,GAAAA,qBAAS,EAACgC,GAEjD,MAAMG,GAAKnC,GAAAA,qBAAS,EAACgC,GACrB,MAAMI,GAAKpC,GAAAA,qBAAS,EAACiC,GAGrB,GAAI5B,GAAAA,gBAAS,EAAC8B,GAAIC,IAAK,OAAOD,GAE9B,MAAMlB,OAAS,IAAI,CAACF,MAAM,CAACsB,KAAK,CAACF,GAAIC,IACrC,GAAInB,SAAW,KAAM,OAAO,KAG5B,GAAIZ,GAAAA,gBAAS,EAACY,OAAQkB,KAAO9B,GAAAA,gBAAS,EAACY,OAAQmB,IAAK,OAAOnB,OAC3D,MAAOjB,GAAAA,qBAAS,EAACiB,OAClB,CAQAhB,kBACCqC,MAAmB,CACnBC,IAA6B,CACH,CAC1B,MAAOtC,GAAAA,oCAAiB,EAACqC,OAAQC,KAAM,IAAI,CAACxB,MAAM,CACnD,CAUAyB,cACCrC,GAAgB,CAChBC,GAAgB,CAChBqC,OAAgC,CAChCC,OAAiC,CAIhC,CACD,MAAMC,YAAc
1C,GAAAA,oCAAiB,EAACE,IAAKsC,QAAS,IAAI,CAAC1B,MAAM,EAC/D,MAAM6B,YAAc3C,GAAAA,oCAAiB,EAACG,IAAKsC,SAAWD,QAAS,IAAI,CAAC1B,MAAM,EAC1E,MAAMc,OAAS,IAAI,CAACb,KAAK,CAAC2B,YAAYE,QAAQ,CAAED,YAAYC,QAAQ,EAEpE,MAAO,CAAE,GAAGhB,MAAM,CAAEc,YAAaC,WAAY,CAC9C,CAQA5C,UAAU8C,GAA0B,CAAyB,CAC5D,MAAO9C,GAAAA,qBAAS,EAAC8C,IAClB,CAOAjD,aAAakD,KAAa,CAAElB,MAAoB,CAAU,CACzD,MAAOhC,GAAAA,uBAAY,EAACkD,MAAOlB,OAC5B,CAhNA,aAAc,CAFd,sBAAiBd,SAAjB,KAAA,EAGC,CAAA,IAAI,CAACA,MAAM,CAAG,IAAIpB,wBAAW,AAC9B,CA+MD"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports,"__esModule",{value:true});Object.defineProperty(exports,"MergeEngine",{enumerable:true,get:function(){return MergeEngine}});const _jsonschemamerge=require("@x0k/json-schema-merge");const _array=require("@x0k/json-schema-merge/lib/array");const _formatvalidator=require("./format-validator.js");const _utils=require("./utils.js");function _define_property(obj,key,value){if(key in obj){Object.defineProperty(obj,key,{value:value,enumerable:true,configurable:true,writable:true})}else{obj[key]=value}return obj}function hasConstConflict(a,b){if(typeof a==="boolean"||typeof b==="boolean")return false;const aHasConst=(0,_utils.hasOwn)(a,"const");const bHasConst=(0,_utils.hasOwn)(b,"const");const aConst=a.const;const bConst=b.const;const aEnum=a.enum;const bEnum=b.enum;if(aHasConst&&bHasConst){return!(0,_utils.deepEqual)(aConst,bConst)}if(aHasConst&&Array.isArray(bEnum)){return!bEnum.some(v=>(0,_utils.deepEqual)(v,aConst))}if(bHasConst&&Array.isArray(aEnum)){return!aEnum.some(v=>(0,_utils.deepEqual)(v,bConst))}return false}const SINGLE_SCHEMA_CONFLICT_KEYS=["items","additionalProperties","contains","propertyNames","not"];const PROPERTIES_MAP_CONFLICT_KEYS=["properties","patternProperties"];function hasDeepConstConflict(a,b){if(hasConstConflict(a,b))return true;if(typeof a==="boolean"||typeof b==="boolean")return false;for(const key of SINGLE_SCHEMA_CONFLICT_KEYS){const aVal=a[key];const bVal=b[key];if((0,_utils.isPlainObj)(aVal)&&(0,_utils.isPlainObj)(bVal)&&hasDeepConstConflict(aVal,bVal)){return true}}for(const key of PROPERTIES_MAP_CONFLICT_KEYS){const aMap=a[key];const bMap=b[key];if(!(0,_utils.isPlainObj)(aMap)||!(0,_utils.isPlainObj)(bMap))continue;const aMapSafe=aMap;const bMapSafe=bMap;for(const propKey of Object.keys(aMapSafe)){const aVal=aMapSafe[propKey];const bVal=bMapSafe[propKey];if(aVal!==undefined&&bVal!==undefined&&(0,_utils.hasOwn)(bMapSafe,propKey)&&hasDeepConstConflict(aVal,bVal)){return 
true}}}if(Array.isArray(a.items)&&Array.isArray(b.items)){const aItems=a.items;const bItems=b.items;const len=Math.min(aItems.length,bItems.length);for(let i=0;i<len;i++){const aItem=aItems[i];const bItem=bItems[i];if(aItem===undefined||bItem===undefined)continue;if(hasDeepConstConflict(aItem,bItem)){return true}}}return false}function hasAdditionalPropertiesConflict(a,b){if(typeof a==="boolean"||typeof b==="boolean")return false;const aProps=(0,_utils.isPlainObj)(a.properties)?a.properties:undefined;const bProps=(0,_utils.isPlainObj)(b.properties)?b.properties:undefined;if(!aProps&&!bProps)return false;const aKeys=aProps?Object.keys(aProps):[];const bKeys=bProps?Object.keys(bProps):[];const aRequired=Array.isArray(a.required)?a.required:[];const bRequired=Array.isArray(b.required)?b.required:[];if(a.additionalProperties===false&&aProps&&bProps){const hasRequiredExtra=bRequired.some(k=>!(0,_utils.hasOwn)(aProps,k)&&(0,_utils.hasOwn)(bProps,k));if(hasRequiredExtra&&aKeys.length>0)return true}if((0,_utils.isPlainObj)(a.additionalProperties)&&typeof a.additionalProperties!=="boolean"&&aProps&&bProps){const addPropsSchema=a.additionalProperties;if((0,_utils.hasOwn)(addPropsSchema,"type")){const addPropsType=addPropsSchema.type;const hasTypeConflict=bRequired.some(k=>{if((0,_utils.hasOwn)(aProps,k))return false;if(!(0,_utils.hasOwn)(bProps,k))return false;const bPropDef=bProps[k];if(typeof bPropDef==="boolean")return false;const bProp=bPropDef;if(!(0,_utils.hasOwn)(bProp,"type"))return false;if(typeof addPropsType==="string"&&typeof bProp.type==="string"){return addPropsType!==bProp.type&&!(addPropsType==="number"&&bProp.type==="integer")&&!(addPropsType==="integer"&&bProp.type==="number")}return false});if(hasTypeConflict)return true}}if(b.additionalProperties===false&&bProps&&aProps){const hasRequiredExtra=aRequired.some(k=>!(0,_utils.hasOwn)(bProps,k)&&(0,_utils.hasOwn)(aProps,k));if(hasRequiredExtra&&bKeys.length>0)return 
true}if((0,_utils.isPlainObj)(b.additionalProperties)&&typeof b.additionalProperties!=="boolean"&&bProps&&aProps){const addPropsSchema=b.additionalProperties;if((0,_utils.hasOwn)(addPropsSchema,"type")){const addPropsType=addPropsSchema.type;const hasTypeConflict=aRequired.some(k=>{if((0,_utils.hasOwn)(bProps,k))return false;if(!(0,_utils.hasOwn)(aProps,k))return false;const aPropDef=aProps[k];if(typeof aPropDef==="boolean")return false;const aProp=aPropDef;if(!(0,_utils.hasOwn)(aProp,"type"))return false;if(typeof addPropsType==="string"&&typeof aProp.type==="string"){return addPropsType!==aProp.type&&!(addPropsType==="number"&&aProp.type==="integer")&&!(addPropsType==="integer"&&aProp.type==="number")}return false});if(hasTypeConflict)return true}}if(aProps&&bProps){for(const k of aKeys){if(!(0,_utils.hasOwn)(bProps,k))continue;const aPropDef=aProps[k];const bPropDef=bProps[k];if(typeof aPropDef==="boolean"||typeof bPropDef==="boolean")continue;if(hasAdditionalPropertiesConflict(aPropDef,bPropDef)){return true}}}return false}function hasFormatConflict(a,b){if(typeof a==="boolean"||typeof b==="boolean")return false;if((0,_utils.hasOwn)(a,"format")&&(0,_utils.hasOwn)(b,"format")){const aFormat=a.format;const bFormat=b.format;if(aFormat!==bFormat){const subsetCheck=(0,_formatvalidator.isFormatSubset)(aFormat,bFormat);if(subsetCheck!==true){const reverseCheck=(0,_formatvalidator.isFormatSubset)(bFormat,aFormat);if(reverseCheck!==true){return true}}}}if((0,_utils.isPlainObj)(a.properties)&&(0,_utils.isPlainObj)(b.properties)){const aMap=a.properties;const bMap=b.properties;for(const k of Object.keys(aMap)){const aVal=aMap[k];const bVal=bMap[k];if(aVal!==undefined&&bVal!==undefined&&(0,_utils.hasOwn)(bMap,k)&&hasFormatConflict(aVal,bVal)){return true}}}if((0,_utils.isPlainObj)(a.items)&&(0,_utils.isPlainObj)(b.items)){if(hasFormatConflict(a.items,b.items))return 
true}if((0,_utils.isPlainObj)(a.additionalProperties)&&(0,_utils.isPlainObj)(b.additionalProperties)){if(hasFormatConflict(a.additionalProperties,b.additionalProperties))return true}return false}class MergeEngine{merge(a,b){if(hasDeepConstConflict(a,b)){return null}if(hasFormatConflict(a,b)){return null}if(hasAdditionalPropertiesConflict(a,b)){return null}try{return this.shallowAllOfMergeFn({allOf:[a,b]})}catch{return null}}mergeOrThrow(a,b){if(hasDeepConstConflict(a,b)){throw new Error("Incompatible const values: schemas have conflicting const constraints")}if(hasFormatConflict(a,b)){throw new Error("Incompatible format values: schemas have conflicting format constraints")}if(hasAdditionalPropertiesConflict(a,b)){throw new Error("Incompatible additionalProperties: required properties conflict with additionalProperties constraint")}return this.shallowAllOfMergeFn({allOf:[a,b]})}compare(a,b){return this.compareFn(a,b)}isEqual(a,b){return this.compareFn(a,b)===0}constructor(){_define_property(this,"compareFn",void 0);_define_property(this,"shallowAllOfMergeFn",void 0);const{compareSchemaDefinitions,compareSchemaValues}=(0,_jsonschemamerge.createComparator)();const safeCompareSchemaValues=(a,b)=>{if(a===null&&b===null)return 0;return compareSchemaValues(a,b)};const{mergeArrayOfSchemaDefinitions}=(0,_jsonschemamerge.createMerger)({intersectJson:(0,_array.createIntersector)(safeCompareSchemaValues),deduplicateJsonSchemaDef:(0,_array.createDeduplicator)(compareSchemaDefinitions)});this.compareFn=compareSchemaDefinitions;this.shallowAllOfMergeFn=(0,_jsonschemamerge.createShallowAllOfMerge)(mergeArrayOfSchemaDefinitions)}}
|
|
2
|
+
//# sourceMappingURL=merge-engine.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/merge-engine.ts"],"sourcesContent":["import {\n\tcreateComparator,\n\tcreateMerger,\n\tcreateShallowAllOfMerge,\n} from \"@x0k/json-schema-merge\";\nimport {\n\tcreateDeduplicator,\n\tcreateIntersector,\n} from \"@x0k/json-schema-merge/lib/array\";\n\nimport type {\n\tJSONSchema7,\n\tJSONSchema7Definition,\n\tJSONSchema7Type,\n} from \"json-schema\";\n\nimport { isFormatSubset } from \"./format-validator\";\nimport { deepEqual, hasOwn, isPlainObj } from \"./utils\";\n\n// ─── Merge Engine ────────────────────────────────────────────────────────────\n//\n// Encapsule la librairie `@x0k/json-schema-merge` et expose une API simple\n// pour merger et comparer des JSON Schemas.\n//\n// Principe mathématique :\n// A ∩ B = allOf([A, B]) résolu via shallow merge\n// A ≡ B ⟺ compare(A, B) === 0\n//\n// Pré-checks avant merge :\n// - `hasDeepConstConflict` : détecte les conflits de `const`/`enum`\n// - `hasAdditionalPropertiesConflict` : détecte les conflits `additionalProperties`\n// - `hasFormatConflict` : détecte les conflits de `format` entre deux schemas\n\n// ─── Const conflict detection ────────────────────────────────────────────────\n\n/**\n * Détecte un conflit de `const` entre deux schemas.\n *\n * Cas 1 — const vs const : les deux schemas ont un `const` avec des valeurs\n * différentes → intersection vide.\n *\n * Cas 2 — const vs enum : un schema a `const`, l'autre a `enum`.\n * Si la valeur de `const` n'est pas dans l'`enum` → intersection vide.\n *\n * Utilise `lodash/isEqual` pour la comparaison profonde (objets, tableaux).\n */\nfunction hasConstConflict(\n\ta: JSONSchema7Definition,\n\tb: JSONSchema7Definition,\n): boolean {\n\tif (typeof a === \"boolean\" || typeof b === \"boolean\") return false;\n\n\tconst aHasConst = hasOwn(a, \"const\");\n\tconst bHasConst = hasOwn(b, \"const\");\n\tconst aConst = (a as Record<string, unknown>).const;\n\tconst bConst = (b as Record<string, unknown>).const;\n\tconst aEnum = a.enum as 
unknown[] | undefined;\n\tconst bEnum = b.enum as unknown[] | undefined;\n\n\t// Cas 1 — const vs const\n\tif (aHasConst && bHasConst) {\n\t\treturn !deepEqual(aConst, bConst);\n\t}\n\n\t// Cas 2 — const vs enum\n\tif (aHasConst && Array.isArray(bEnum)) {\n\t\treturn !bEnum.some((v) => deepEqual(v, aConst));\n\t}\n\tif (bHasConst && Array.isArray(aEnum)) {\n\t\treturn !aEnum.some((v) => deepEqual(v, bConst));\n\t}\n\n\treturn false;\n}\n\n/** Mots-clés contenant un unique sous-schema à vérifier récursivement */\nconst SINGLE_SCHEMA_CONFLICT_KEYS = [\n\t\"items\",\n\t\"additionalProperties\",\n\t\"contains\",\n\t\"propertyNames\",\n\t\"not\",\n] as const;\n\n/** Mots-clés contenant un Record<string, JSONSchema7Definition> */\nconst PROPERTIES_MAP_CONFLICT_KEYS = [\n\t\"properties\",\n\t\"patternProperties\",\n] as const;\n\n/**\n * Détecte récursivement les conflits de `const` dans les sous-schemas.\n *\n * Quand la librairie de merge fait un shallow merge, les sous-schemas\n * imbriqués peuvent aussi avoir des conflits de `const` masqués\n * (elle utilise `identity` pour `const`).\n *\n * Récurse dans :\n * - `properties`, `patternProperties` (clés communes)\n * - `items` (single schema), tuple `items` (par index)\n * - `additionalProperties`, `contains`, `propertyNames`, `not`\n */\nfunction hasDeepConstConflict(\n\ta: JSONSchema7Definition,\n\tb: JSONSchema7Definition,\n): boolean {\n\tif (hasConstConflict(a, b)) return true;\n\n\tif (typeof a === \"boolean\" || typeof b === \"boolean\") return false;\n\n\t// ── Single sub-schema keywords ──\n\tfor (const key of SINGLE_SCHEMA_CONFLICT_KEYS) {\n\t\tconst aVal = (a as Record<string, unknown>)[key] as\n\t\t\t| JSONSchema7Definition\n\t\t\t| undefined;\n\t\tconst bVal = (b as Record<string, unknown>)[key] as\n\t\t\t| JSONSchema7Definition\n\t\t\t| undefined;\n\t\tif (\n\t\t\tisPlainObj(aVal) &&\n\t\t\tisPlainObj(bVal) &&\n\t\t\thasDeepConstConflict(\n\t\t\t\taVal as JSONSchema7Definition,\n\t\t\t\tbVal as 
JSONSchema7Definition,\n\t\t\t)\n\t\t) {\n\t\t\treturn true;\n\t\t}\n\t}\n\n\t// ── Properties-like maps (properties, patternProperties) ──\n\tfor (const key of PROPERTIES_MAP_CONFLICT_KEYS) {\n\t\tconst aMap = (a as Record<string, unknown>)[key] as\n\t\t\t| Record<string, JSONSchema7Definition>\n\t\t\t| undefined;\n\t\tconst bMap = (b as Record<string, unknown>)[key] as\n\t\t\t| Record<string, JSONSchema7Definition>\n\t\t\t| undefined;\n\t\tif (!isPlainObj(aMap) || !isPlainObj(bMap)) continue;\n\t\tconst aMapSafe = aMap as Record<string, JSONSchema7Definition>;\n\t\tconst bMapSafe = bMap as Record<string, JSONSchema7Definition>;\n\t\tfor (const propKey of Object.keys(aMapSafe)) {\n\t\t\tconst aVal = aMapSafe[propKey];\n\t\t\tconst bVal = bMapSafe[propKey];\n\t\t\tif (\n\t\t\t\taVal !== undefined &&\n\t\t\t\tbVal !== undefined &&\n\t\t\t\thasOwn(bMapSafe, propKey) &&\n\t\t\t\thasDeepConstConflict(aVal, bVal)\n\t\t\t) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Tuple items (array of schemas, compared by index) ──\n\tif (Array.isArray(a.items) && Array.isArray(b.items)) {\n\t\tconst aItems = a.items as JSONSchema7Definition[];\n\t\tconst bItems = b.items as JSONSchema7Definition[];\n\t\tconst len = Math.min(aItems.length, bItems.length);\n\t\tfor (let i = 0; i < len; i++) {\n\t\t\tconst aItem = aItems[i];\n\t\t\tconst bItem = bItems[i];\n\t\t\tif (aItem === undefined || bItem === undefined) continue;\n\t\t\tif (hasDeepConstConflict(aItem, bItem)) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false;\n}\n\n// ─── additionalProperties conflict detection ─────────────────────────────────\n\n/**\n * Détecte un conflit entre `additionalProperties` et les propriétés extra\n * **requises** de l'autre schema.\n *\n * ⚠️ Cette fonction est **ultra-conservatrice** : elle ne détecte que les\n * conflits où une propriété est à la fois :\n * - INTERDITE par `additionalProperties: false` d'un côté\n * - REQUISE (`required`) par l'autre côté\n * - ABSENTE des 
`properties` du côté restrictif\n * - ET le côté restrictif AUSSI a un `required` qui rend l'objet non-vide\n * (sinon la librairie gère déjà le cas en excluant les propriétés extra)\n *\n * La librairie de merge (`@x0k/json-schema-merge`) gère DÉJÀ correctement\n * le cas `additionalProperties: false` avec des propriétés simplement DÉFINIES\n * (non requises) dans l'autre schema — elle les exclut du résultat.\n * On ne détecte donc QUE les contradictions `required` impossibles à résoudre.\n *\n * Cas gérés :\n * 1. `a` a `additionalProperties: false` et `b` REQUIERT des propriétés\n * absentes de `a.properties`, ET ces propriétés sont dans `b.properties`\n * → conflit certain (intersection vide car b exige, a interdit)\n * 2. Symétrique pour `b.additionalProperties: false`\n * 3. `additionalProperties` comme schema → vérifier la compatibilité de type\n * des propriétés extra REQUISES uniquement\n * 4. Récursion dans les propriétés communes (sous-objets)\n *\n * ⚠️ Ne vérifie que les clés de `properties`, pas les `patternProperties`\n * (trop complexe à résoudre statiquement).\n *\n * Retourne `true` si un conflit évident est détecté, `false` sinon.\n * En cas de doute → `false` (conservateur, laisser le merge décider).\n *\n * Utilise `_.keys`, `_.some`, `_.every`, `_.has`, `_.get`, `_.isPlainObject`,\n * `_.includes` pour des vérifications concises.\n */\nfunction hasAdditionalPropertiesConflict(\n\ta: JSONSchema7Definition,\n\tb: JSONSchema7Definition,\n): boolean {\n\tif (typeof a === \"boolean\" || typeof b === \"boolean\") return false;\n\n\tconst aProps = isPlainObj(a.properties)\n\t\t? (a.properties as Record<string, JSONSchema7Definition>)\n\t\t: undefined;\n\tconst bProps = isPlainObj(b.properties)\n\t\t? (b.properties as Record<string, JSONSchema7Definition>)\n\t\t: undefined;\n\n\t// Si aucun des deux n'a de properties, on ne peut rien déterminer\n\tif (!aProps && !bProps) return false;\n\n\tconst aKeys = aProps ? 
Object.keys(aProps) : [];\n\tconst bKeys = bProps ? Object.keys(bProps) : [];\n\tconst aRequired = Array.isArray(a.required) ? (a.required as string[]) : [];\n\tconst bRequired = Array.isArray(b.required) ? (b.required as string[]) : [];\n\n\t// ── Vérifier additionalProperties: false de a vs propriétés REQUISES extra de b ──\n\t// Condition stricte : b doit DÉFINIR la propriété dans b.properties ET la\n\t// REQUÉRIR dans b.required, ET cette propriété doit être ABSENTE de a.properties.\n\t// De plus, a doit lui-même avoir des propriétés (sinon on ne peut rien dire).\n\tif (a.additionalProperties === false && aProps && bProps) {\n\t\tconst hasRequiredExtra = bRequired.some(\n\t\t\t(k) => !hasOwn(aProps, k) && hasOwn(bProps, k),\n\t\t);\n\t\t// Ne détecter le conflit que si a a aussi un required qui rend l'objet\n\t\t// structurellement contraint (pas un schema vague)\n\t\tif (hasRequiredExtra && aKeys.length > 0) return true;\n\t}\n\n\t// ── Vérification du cas additionalProperties comme schema ──\n\t// Si a.additionalProperties est un schema avec un type, et que b REQUIERT\n\t// une propriété extra dont le type est incompatible → conflit\n\tif (\n\t\tisPlainObj(a.additionalProperties) &&\n\t\ttypeof a.additionalProperties !== \"boolean\" &&\n\t\taProps &&\n\t\tbProps\n\t) {\n\t\tconst addPropsSchema = a.additionalProperties as JSONSchema7;\n\t\tif (hasOwn(addPropsSchema, \"type\")) {\n\t\t\tconst addPropsType = addPropsSchema.type;\n\t\t\tconst hasTypeConflict = bRequired.some((k) => {\n\t\t\t\tif (hasOwn(aProps, k)) return false;\n\t\t\t\tif (!hasOwn(bProps, k)) return false;\n\t\t\t\tconst bPropDef = bProps[k];\n\t\t\t\tif (typeof bPropDef === \"boolean\") return false;\n\t\t\t\tconst bProp = bPropDef as JSONSchema7;\n\t\t\t\tif (!hasOwn(bProp, \"type\")) return false;\n\t\t\t\tif (\n\t\t\t\t\ttypeof addPropsType === \"string\" &&\n\t\t\t\t\ttypeof bProp.type === \"string\"\n\t\t\t\t) {\n\t\t\t\t\treturn (\n\t\t\t\t\t\taddPropsType !== bProp.type 
&&\n\t\t\t\t\t\t!(addPropsType === \"number\" && bProp.type === \"integer\") &&\n\t\t\t\t\t\t!(addPropsType === \"integer\" && bProp.type === \"number\")\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\treturn false;\n\t\t\t});\n\t\t\tif (hasTypeConflict) return true;\n\t\t}\n\t}\n\n\t// ── Vérification symétrique : additionalProperties de b vs propriétés REQUISES extra de a ──\n\tif (b.additionalProperties === false && bProps && aProps) {\n\t\tconst hasRequiredExtra = aRequired.some(\n\t\t\t(k) => !hasOwn(bProps, k) && hasOwn(aProps, k),\n\t\t);\n\t\tif (hasRequiredExtra && bKeys.length > 0) return true;\n\t}\n\n\t// Symétrique pour additionalProperties comme schema\n\tif (\n\t\tisPlainObj(b.additionalProperties) &&\n\t\ttypeof b.additionalProperties !== \"boolean\" &&\n\t\tbProps &&\n\t\taProps\n\t) {\n\t\tconst addPropsSchema = b.additionalProperties as JSONSchema7;\n\t\tif (hasOwn(addPropsSchema, \"type\")) {\n\t\t\tconst addPropsType = addPropsSchema.type;\n\t\t\tconst hasTypeConflict = aRequired.some((k) => {\n\t\t\t\tif (hasOwn(bProps, k)) return false;\n\t\t\t\tif (!hasOwn(aProps, k)) return false;\n\t\t\t\tconst aPropDef = aProps[k];\n\t\t\t\tif (typeof aPropDef === \"boolean\") return false;\n\t\t\t\tconst aProp = aPropDef as JSONSchema7;\n\t\t\t\tif (!hasOwn(aProp, \"type\")) return false;\n\t\t\t\tif (\n\t\t\t\t\ttypeof addPropsType === \"string\" &&\n\t\t\t\t\ttypeof aProp.type === \"string\"\n\t\t\t\t) {\n\t\t\t\t\treturn (\n\t\t\t\t\t\taddPropsType !== aProp.type &&\n\t\t\t\t\t\t!(addPropsType === \"number\" && aProp.type === \"integer\") &&\n\t\t\t\t\t\t!(addPropsType === \"integer\" && aProp.type === \"number\")\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\treturn false;\n\t\t\t});\n\t\t\tif (hasTypeConflict) return true;\n\t\t}\n\t}\n\n\t// ── Récursion dans les propriétés communes ──\n\t// Si les deux schemas ont des propriétés communes qui sont des objets,\n\t// vérifier récursivement les conflits additionalProperties\n\tif (aProps && bProps) {\n\t\tfor (const k of aKeys) 
{\n\t\t\tif (!hasOwn(bProps, k)) continue;\n\t\t\tconst aPropDef = aProps[k];\n\t\t\tconst bPropDef = bProps[k];\n\t\t\tif (typeof aPropDef === \"boolean\" || typeof bPropDef === \"boolean\")\n\t\t\t\tcontinue;\n\t\t\tif (\n\t\t\t\thasAdditionalPropertiesConflict(\n\t\t\t\t\taPropDef as JSONSchema7Definition,\n\t\t\t\t\tbPropDef as JSONSchema7Definition,\n\t\t\t\t)\n\t\t\t) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false;\n}\n\n// ─── Format conflict detection ───────────────────────────────────────────────\n\n/**\n * Détecte un conflit de format entre deux schemas.\n *\n * ⚠️ Ne se déclenche QUE quand les DEUX schemas ont un `format`.\n * Si un seul schema a un `format`, il n'y a PAS de conflit — le merge\n * engine gère nativement ce cas (le format est conservé dans l'intersection,\n * et la comparaison `merged ≡ sub` détermine correctement la relation ⊆).\n *\n * Deux schemas avec des formats différents et sans relation d'inclusion\n * connue ont une intersection vide (ex: \"email\" ∩ \"ipv4\" = ∅).\n *\n * Utilise `isFormatSubset` de `format-validator.ts` pour vérifier la hiérarchie.\n *\n * Récurse dans les sous-schemas (`properties`, `items`, etc.) 
pour détecter\n * les conflits de format imbriqués.\n *\n * @returns `true` si un conflit de format est détecté, `false` sinon\n */\nfunction hasFormatConflict(\n\ta: JSONSchema7Definition,\n\tb: JSONSchema7Definition,\n): boolean {\n\tif (typeof a === \"boolean\" || typeof b === \"boolean\") return false;\n\n\t// ── Seulement quand LES DEUX ont un format ──\n\t// Si un seul a un format → pas de conflit, le merge gère nativement\n\tif (hasOwn(a, \"format\") && hasOwn(b, \"format\")) {\n\t\tconst aFormat = a.format as string;\n\t\tconst bFormat = b.format as string;\n\n\t\t// Même format → pas de conflit\n\t\tif (aFormat !== bFormat) {\n\t\t\t// Vérifier si l'un est un sous-ensemble de l'autre via la hiérarchie\n\t\t\tconst subsetCheck = isFormatSubset(aFormat, bFormat);\n\t\t\tif (subsetCheck !== true) {\n\t\t\t\tconst reverseCheck = isFormatSubset(bFormat, aFormat);\n\t\t\t\tif (reverseCheck !== true) {\n\t\t\t\t\t// Formats différents sans relation connue → conflit\n\t\t\t\t\treturn true;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Récursion dans les sous-schemas ──\n\t// Vérifier les conflits de format dans les propriétés communes\n\tif (isPlainObj(a.properties) && isPlainObj(b.properties)) {\n\t\tconst aMap = a.properties as Record<string, JSONSchema7Definition>;\n\t\tconst bMap = b.properties as Record<string, JSONSchema7Definition>;\n\t\tfor (const k of Object.keys(aMap)) {\n\t\t\tconst aVal = aMap[k];\n\t\t\tconst bVal = bMap[k];\n\t\t\tif (\n\t\t\t\taVal !== undefined &&\n\t\t\t\tbVal !== undefined &&\n\t\t\t\thasOwn(bMap, k) &&\n\t\t\t\thasFormatConflict(aVal, bVal)\n\t\t\t) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\n\t// Vérifier items (single schema)\n\tif (isPlainObj(a.items) && isPlainObj(b.items)) {\n\t\tif (\n\t\t\thasFormatConflict(\n\t\t\t\ta.items as JSONSchema7Definition,\n\t\t\t\tb.items as JSONSchema7Definition,\n\t\t\t)\n\t\t)\n\t\t\treturn true;\n\t}\n\n\t// Vérifier additionalProperties\n\tif (\n\t\tisPlainObj(a.additionalProperties) 
&&\n\t\tisPlainObj(b.additionalProperties)\n\t) {\n\t\tif (\n\t\t\thasFormatConflict(\n\t\t\t\ta.additionalProperties as JSONSchema7Definition,\n\t\t\t\tb.additionalProperties as JSONSchema7Definition,\n\t\t\t)\n\t\t)\n\t\t\treturn true;\n\t}\n\n\treturn false;\n}\n\n// ─── MergeEngine class ───────────────────────────────────────────────────────\n\nexport class MergeEngine {\n\tprivate readonly compareFn: (\n\t\ta: JSONSchema7Definition,\n\t\tb: JSONSchema7Definition,\n\t) => number;\n\n\tprivate readonly shallowAllOfMergeFn: (\n\t\tschema: JSONSchema7 & { allOf: JSONSchema7Definition[] },\n\t) => JSONSchema7Definition;\n\n\tconstructor() {\n\t\tconst { compareSchemaDefinitions, compareSchemaValues } =\n\t\t\tcreateComparator();\n\n\t\t// ── Null-safe wrapper for compareSchemaValues ──\n\t\t// The library's compareSchemaValues has a bug: when both a and b are null,\n\t\t// it returns -1 instead of 0 (the null check for `a` fires before checking\n\t\t// if `b` is also null). This causes createIntersector to lose null values\n\t\t// during enum intersection (the sort-merge join relies on compare(x,x)===0).\n\t\tconst safeCompareSchemaValues = (\n\t\t\ta: JSONSchema7Type,\n\t\t\tb: JSONSchema7Type,\n\t\t): number => {\n\t\t\tif (a === null && b === null) return 0;\n\t\t\treturn compareSchemaValues(a, b);\n\t\t};\n\n\t\tconst { mergeArrayOfSchemaDefinitions } = createMerger({\n\t\t\tintersectJson: createIntersector(safeCompareSchemaValues),\n\t\t\tdeduplicateJsonSchemaDef: createDeduplicator(compareSchemaDefinitions),\n\t\t});\n\n\t\tthis.compareFn = compareSchemaDefinitions;\n\t\tthis.shallowAllOfMergeFn = createShallowAllOfMerge(\n\t\t\tmergeArrayOfSchemaDefinitions,\n\t\t);\n\t}\n\n\t/**\n\t * Merge deux schemas via `allOf([a, b])`.\n\t * Retourne `null` si les schemas sont incompatibles.\n\t *\n\t * Post-merge : détecte les conflits de `const` que la librairie\n\t * ne capture pas (elle utilise `identity` pour `const`).\n\t */\n\tmerge(\n\t\ta: 
JSONSchema7Definition,\n\t\tb: JSONSchema7Definition,\n\t): JSONSchema7Definition | null {\n\t\t// Pré-check : conflit de const détectable avant le merge\n\t\tif (hasDeepConstConflict(a, b)) {\n\t\t\treturn null;\n\t\t}\n\n\t\t// Pré-check : conflit de format (les DEUX ont un format incompatible)\n\t\tif (hasFormatConflict(a, b)) {\n\t\t\treturn null;\n\t\t}\n\n\t\t// Pré-check : conflit additionalProperties vs propriétés REQUISES extra\n\t\t// Ne détecte que les cas où une propriété est à la fois interdite\n\t\t// (additionalProperties: false) et requise (required) → intersection vide.\n\t\t// Les cas où les propriétés sont simplement définies sans être requises\n\t\t// sont gérés correctement par la librairie de merge elle-même.\n\t\tif (hasAdditionalPropertiesConflict(a, b)) {\n\t\t\treturn null;\n\t\t}\n\n\t\ttry {\n\t\t\treturn this.shallowAllOfMergeFn({ allOf: [a, b] });\n\t\t} catch {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\t/**\n\t * Merge via `shallowAllOfMerge` — lève une exception si incompatible.\n\t * Utile quand on veut capturer l'erreur pour le diagnostic.\n\t *\n\t * Post-merge : détecte les conflits de `const` et lève une exception.\n\t */\n\tmergeOrThrow(\n\t\ta: JSONSchema7Definition,\n\t\tb: JSONSchema7Definition,\n\t): JSONSchema7Definition {\n\t\t// Pré-check : conflit de const\n\t\tif (hasDeepConstConflict(a, b)) {\n\t\t\tthrow new Error(\n\t\t\t\t\"Incompatible const values: schemas have conflicting const constraints\",\n\t\t\t);\n\t\t}\n\n\t\t// Pré-check : conflit de format\n\t\tif (hasFormatConflict(a, b)) {\n\t\t\tthrow new Error(\n\t\t\t\t\"Incompatible format values: schemas have conflicting format constraints\",\n\t\t\t);\n\t\t}\n\n\t\t// Pré-check : conflit additionalProperties vs propriétés REQUISES extra\n\t\tif (hasAdditionalPropertiesConflict(a, b)) {\n\t\t\tthrow new Error(\n\t\t\t\t\"Incompatible additionalProperties: required properties conflict with additionalProperties constraint\",\n\t\t\t);\n\t\t}\n\n\t\treturn 
this.shallowAllOfMergeFn({ allOf: [a, b] });\n\t}\n\n\t/**\n\t * Compare structurellement deux schema definitions.\n\t * Retourne 0 si elles sont identiques, sinon un entier non nul.\n\t */\n\tcompare(a: JSONSchema7Definition, b: JSONSchema7Definition): number {\n\t\treturn this.compareFn(a, b);\n\t}\n\n\t/**\n\t * Vérifie l'égalité structurelle entre deux schema definitions.\n\t */\n\tisEqual(a: JSONSchema7Definition, b: JSONSchema7Definition): boolean {\n\t\treturn this.compareFn(a, b) === 0;\n\t}\n}\n"],"names":["MergeEngine","hasConstConflict","a","b","aHasConst","hasOwn","bHasConst","aConst","const","bConst","aEnum","enum","bEnum","deepEqual","Array","isArray","some","v","SINGLE_SCHEMA_CONFLICT_KEYS","PROPERTIES_MAP_CONFLICT_KEYS","hasDeepConstConflict","key","aVal","bVal","isPlainObj","aMap","bMap","aMapSafe","bMapSafe","propKey","Object","keys","undefined","items","aItems","bItems","len","Math","min","length","i","aItem","bItem","hasAdditionalPropertiesConflict","aProps","properties","bProps","aKeys","bKeys","aRequired","required","bRequired","additionalProperties","hasRequiredExtra","k","addPropsSchema","addPropsType","type","hasTypeConflict","bPropDef","bProp","aPropDef","aProp","hasFormatConflict","aFormat","format","bFormat","subsetCheck","isFormatSubset","reverseCheck","merge","shallowAllOfMergeFn","allOf","mergeOrThrow","Error","compare","compareFn","isEqual","compareSchemaDefinitions","compareSchemaValues","createComparator","safeCompareSchemaValues","mergeArrayOfSchemaDefinitions","createMerger","intersectJson","createIntersector","deduplicateJsonSchemaDef","createDeduplicator","createShallowAllOfMerge"],"mappings":"oGAubaA,qDAAAA,8CAnbN,+CAIA,mEAQwB,2CACe,+LA6B9C,SAASC,iBACRC,CAAwB,CACxBC,CAAwB,EAExB,GAAI,OAAOD,IAAM,WAAa,OAAOC,IAAM,UAAW,OAAO,MAE7D,MAAMC,UAAYC,GAAAA,aAAM,EAACH,EAAG,SAC5B,MAAMI,UAAYD,GAAAA,aAAM,EAACF,EAAG,SAC5B,MAAMI,OAAS,AAACL,EAA8BM,KAAK,CACnD,MAAMC,OAAS,AAACN,EAA8BK,KAAK,CACnD,MAAME,MAAQR,EAAES,IAAI,CACpB,MAAMC,MAAQT,EAAEQ,IAAI,CAGp
B,GAAIP,WAAaE,UAAW,CAC3B,MAAO,CAACO,GAAAA,gBAAS,EAACN,OAAQE,OAC3B,CAGA,GAAIL,WAAaU,MAAMC,OAAO,CAACH,OAAQ,CACtC,MAAO,CAACA,MAAMI,IAAI,CAAC,AAACC,GAAMJ,GAAAA,gBAAS,EAACI,EAAGV,QACxC,CACA,GAAID,WAAaQ,MAAMC,OAAO,CAACL,OAAQ,CACtC,MAAO,CAACA,MAAMM,IAAI,CAAC,AAACC,GAAMJ,GAAAA,gBAAS,EAACI,EAAGR,QACxC,CAEA,OAAO,KACR,CAGA,MAAMS,4BAA8B,CACnC,QACA,uBACA,WACA,gBACA,MACA,CAGD,MAAMC,6BAA+B,CACpC,aACA,oBACA,CAcD,SAASC,qBACRlB,CAAwB,CACxBC,CAAwB,EAExB,GAAIF,iBAAiBC,EAAGC,GAAI,OAAO,KAEnC,GAAI,OAAOD,IAAM,WAAa,OAAOC,IAAM,UAAW,OAAO,MAG7D,IAAK,MAAMkB,OAAOH,4BAA6B,CAC9C,MAAMI,KAAO,AAACpB,CAA6B,CAACmB,IAAI,CAGhD,MAAME,KAAO,AAACpB,CAA6B,CAACkB,IAAI,CAGhD,GACCG,GAAAA,iBAAU,EAACF,OACXE,GAAAA,iBAAU,EAACD,OACXH,qBACCE,KACAC,MAEA,CACD,OAAO,IACR,CACD,CAGA,IAAK,MAAMF,OAAOF,6BAA8B,CAC/C,MAAMM,KAAO,AAACvB,CAA6B,CAACmB,IAAI,CAGhD,MAAMK,KAAO,AAACvB,CAA6B,CAACkB,IAAI,CAGhD,GAAI,CAACG,GAAAA,iBAAU,EAACC,OAAS,CAACD,GAAAA,iBAAU,EAACE,MAAO,SAC5C,MAAMC,SAAWF,KACjB,MAAMG,SAAWF,KACjB,IAAK,MAAMG,WAAWC,OAAOC,IAAI,CAACJ,UAAW,CAC5C,MAAML,KAAOK,QAAQ,CAACE,QAAQ,CAC9B,MAAMN,KAAOK,QAAQ,CAACC,QAAQ,CAC9B,GACCP,OAASU,WACTT,OAASS,WACT3B,GAAAA,aAAM,EAACuB,SAAUC,UACjBT,qBAAqBE,KAAMC,MAC1B,CACD,OAAO,IACR,CACD,CACD,CAGA,GAAIT,MAAMC,OAAO,CAACb,EAAE+B,KAAK,GAAKnB,MAAMC,OAAO,CAACZ,EAAE8B,KAAK,EAAG,CACrD,MAAMC,OAAShC,EAAE+B,KAAK,CACtB,MAAME,OAAShC,EAAE8B,KAAK,CACtB,MAAMG,IAAMC,KAAKC,GAAG,CAACJ,OAAOK,MAAM,CAAEJ,OAAOI,MAAM,EACjD,IAAK,IAAIC,EAAI,EAAGA,EAAIJ,IAAKI,IAAK,CAC7B,MAAMC,MAAQP,MAAM,CAACM,EAAE,CACvB,MAAME,MAAQP,MAAM,CAACK,EAAE,CACvB,GAAIC,QAAUT,WAAaU,QAAUV,UAAW,SAChD,GAAIZ,qBAAqBqB,MAAOC,OAAQ,CACvC,OAAO,IACR,CACD,CACD,CAEA,OAAO,KACR,CAuCA,SAASC,gCACRzC,CAAwB,CACxBC,CAAwB,EAExB,GAAI,OAAOD,IAAM,WAAa,OAAOC,IAAM,UAAW,OAAO,MAE7D,MAAMyC,OAASpB,GAAAA,iBAAU,EAACtB,EAAE2C,UAAU,EAClC3C,EAAE2C,UAAU,CACbb,UACH,MAAMc,OAAStB,GAAAA,iBAAU,EAACrB,EAAE0C,UAAU,EAClC1C,EAAE0C,UAAU,CACbb,UAGH,GAAI,CAACY,QAAU,CAACE,OAAQ,OAAO,MAE/B,MAAMC,MAAQH,OAASd,OAAOC,IAAI,CAACa,QAAU,EAAE,CAC/C,MAAMI,MAAQF,OAAShB,OAAOC,IAAI,CAACe,QAAU,EAAE,CAC/C,MAAMG,UAAYnC,M
AAMC,OAAO,CAACb,EAAEgD,QAAQ,EAAKhD,EAAEgD,QAAQ,CAAgB,EAAE,CAC3E,MAAMC,UAAYrC,MAAMC,OAAO,CAACZ,EAAE+C,QAAQ,EAAK/C,EAAE+C,QAAQ,CAAgB,EAAE,CAM3E,GAAIhD,EAAEkD,oBAAoB,GAAK,OAASR,QAAUE,OAAQ,CACzD,MAAMO,iBAAmBF,UAAUnC,IAAI,CACtC,AAACsC,GAAM,CAACjD,GAAAA,aAAM,EAACuC,OAAQU,IAAMjD,GAAAA,aAAM,EAACyC,OAAQQ,IAI7C,GAAID,kBAAoBN,MAAMR,MAAM,CAAG,EAAG,OAAO,IAClD,CAKA,GACCf,GAAAA,iBAAU,EAACtB,EAAEkD,oBAAoB,GACjC,OAAOlD,EAAEkD,oBAAoB,GAAK,WAClCR,QACAE,OACC,CACD,MAAMS,eAAiBrD,EAAEkD,oBAAoB,CAC7C,GAAI/C,GAAAA,aAAM,EAACkD,eAAgB,QAAS,CACnC,MAAMC,aAAeD,eAAeE,IAAI,CACxC,MAAMC,gBAAkBP,UAAUnC,IAAI,CAAC,AAACsC,IACvC,GAAIjD,GAAAA,aAAM,EAACuC,OAAQU,GAAI,OAAO,MAC9B,GAAI,CAACjD,GAAAA,aAAM,EAACyC,OAAQQ,GAAI,OAAO,MAC/B,MAAMK,SAAWb,MAAM,CAACQ,EAAE,CAC1B,GAAI,OAAOK,WAAa,UAAW,OAAO,MAC1C,MAAMC,MAAQD,SACd,GAAI,CAACtD,GAAAA,aAAM,EAACuD,MAAO,QAAS,OAAO,MACnC,GACC,OAAOJ,eAAiB,UACxB,OAAOI,MAAMH,IAAI,GAAK,SACrB,CACD,OACCD,eAAiBI,MAAMH,IAAI,EAC3B,CAAED,CAAAA,eAAiB,UAAYI,MAAMH,IAAI,GAAK,SAAQ,GACtD,CAAED,CAAAA,eAAiB,WAAaI,MAAMH,IAAI,GAAK,QAAO,CAExD,CACA,OAAO,KACR,GACA,GAAIC,gBAAiB,OAAO,IAC7B,CACD,CAGA,GAAIvD,EAAEiD,oBAAoB,GAAK,OAASN,QAAUF,OAAQ,CACzD,MAAMS,iBAAmBJ,UAAUjC,IAAI,CACtC,AAACsC,GAAM,CAACjD,GAAAA,aAAM,EAACyC,OAAQQ,IAAMjD,GAAAA,aAAM,EAACuC,OAAQU,IAE7C,GAAID,kBAAoBL,MAAMT,MAAM,CAAG,EAAG,OAAO,IAClD,CAGA,GACCf,GAAAA,iBAAU,EAACrB,EAAEiD,oBAAoB,GACjC,OAAOjD,EAAEiD,oBAAoB,GAAK,WAClCN,QACAF,OACC,CACD,MAAMW,eAAiBpD,EAAEiD,oBAAoB,CAC7C,GAAI/C,GAAAA,aAAM,EAACkD,eAAgB,QAAS,CACnC,MAAMC,aAAeD,eAAeE,IAAI,CACxC,MAAMC,gBAAkBT,UAAUjC,IAAI,CAAC,AAACsC,IACvC,GAAIjD,GAAAA,aAAM,EAACyC,OAAQQ,GAAI,OAAO,MAC9B,GAAI,CAACjD,GAAAA,aAAM,EAACuC,OAAQU,GAAI,OAAO,MAC/B,MAAMO,SAAWjB,MAAM,CAACU,EAAE,CAC1B,GAAI,OAAOO,WAAa,UAAW,OAAO,MAC1C,MAAMC,MAAQD,SACd,GAAI,CAACxD,GAAAA,aAAM,EAACyD,MAAO,QAAS,OAAO,MACnC,GACC,OAAON,eAAiB,UACxB,OAAOM,MAAML,IAAI,GAAK,SACrB,CACD,OACCD,eAAiBM,MAAML,IAAI,EAC3B,CAAED,CAAAA,eAAiB,UAAYM,MAAML,IAAI,GAAK,SAAQ,GACtD,CAAED,CAAAA,eAAiB,WAAaM,MAAML,IAAI,GAAK,QAAO,CAExD,CACA,OAAO,KACR,GACA,GAAIC,gBAAiB,OAAO,IAC7B,CACD,CA
KA,GAAId,QAAUE,OAAQ,CACrB,IAAK,MAAMQ,KAAKP,MAAO,CACtB,GAAI,CAAC1C,GAAAA,aAAM,EAACyC,OAAQQ,GAAI,SACxB,MAAMO,SAAWjB,MAAM,CAACU,EAAE,CAC1B,MAAMK,SAAWb,MAAM,CAACQ,EAAE,CAC1B,GAAI,OAAOO,WAAa,WAAa,OAAOF,WAAa,UACxD,SACD,GACChB,gCACCkB,SACAF,UAEA,CACD,OAAO,IACR,CACD,CACD,CAEA,OAAO,KACR,CAsBA,SAASI,kBACR7D,CAAwB,CACxBC,CAAwB,EAExB,GAAI,OAAOD,IAAM,WAAa,OAAOC,IAAM,UAAW,OAAO,MAI7D,GAAIE,GAAAA,aAAM,EAACH,EAAG,WAAaG,GAAAA,aAAM,EAACF,EAAG,UAAW,CAC/C,MAAM6D,QAAU9D,EAAE+D,MAAM,CACxB,MAAMC,QAAU/D,EAAE8D,MAAM,CAGxB,GAAID,UAAYE,QAAS,CAExB,MAAMC,YAAcC,GAAAA,+BAAc,EAACJ,QAASE,SAC5C,GAAIC,cAAgB,KAAM,CACzB,MAAME,aAAeD,GAAAA,+BAAc,EAACF,QAASF,SAC7C,GAAIK,eAAiB,KAAM,CAE1B,OAAO,IACR,CACD,CACD,CACD,CAIA,GAAI7C,GAAAA,iBAAU,EAACtB,EAAE2C,UAAU,GAAKrB,GAAAA,iBAAU,EAACrB,EAAE0C,UAAU,EAAG,CACzD,MAAMpB,KAAOvB,EAAE2C,UAAU,CACzB,MAAMnB,KAAOvB,EAAE0C,UAAU,CACzB,IAAK,MAAMS,KAAKxB,OAAOC,IAAI,CAACN,MAAO,CAClC,MAAMH,KAAOG,IAAI,CAAC6B,EAAE,CACpB,MAAM/B,KAAOG,IAAI,CAAC4B,EAAE,CACpB,GACChC,OAASU,WACTT,OAASS,WACT3B,GAAAA,aAAM,EAACqB,KAAM4B,IACbS,kBAAkBzC,KAAMC,MACvB,CACD,OAAO,IACR,CACD,CACD,CAGA,GAAIC,GAAAA,iBAAU,EAACtB,EAAE+B,KAAK,GAAKT,GAAAA,iBAAU,EAACrB,EAAE8B,KAAK,EAAG,CAC/C,GACC8B,kBACC7D,EAAE+B,KAAK,CACP9B,EAAE8B,KAAK,EAGR,OAAO,IACT,CAGA,GACCT,GAAAA,iBAAU,EAACtB,EAAEkD,oBAAoB,GACjC5B,GAAAA,iBAAU,EAACrB,EAAEiD,oBAAoB,EAChC,CACD,GACCW,kBACC7D,EAAEkD,oBAAoB,CACtBjD,EAAEiD,oBAAoB,EAGvB,OAAO,IACT,CAEA,OAAO,KACR,CAIO,MAAMpD,YA6CZsE,MACCpE,CAAwB,CACxBC,CAAwB,CACO,CAE/B,GAAIiB,qBAAqBlB,EAAGC,GAAI,CAC/B,OAAO,IACR,CAGA,GAAI4D,kBAAkB7D,EAAGC,GAAI,CAC5B,OAAO,IACR,CAOA,GAAIwC,gCAAgCzC,EAAGC,GAAI,CAC1C,OAAO,IACR,CAEA,GAAI,CACH,OAAO,IAAI,CAACoE,mBAAmB,CAAC,CAAEC,MAAO,CAACtE,EAAGC,EAAE,AAAC,EACjD,CAAE,KAAM,CACP,OAAO,IACR,CACD,CAQAsE,aACCvE,CAAwB,CACxBC,CAAwB,CACA,CAExB,GAAIiB,qBAAqBlB,EAAGC,GAAI,CAC/B,MAAM,IAAIuE,MACT,wEAEF,CAGA,GAAIX,kBAAkB7D,EAAGC,GAAI,CAC5B,MAAM,IAAIuE,MACT,0EAEF,CAGA,GAAI/B,gCAAgCzC,EAAGC,GAAI,CAC1C,MAAM,IAAIuE,MACT,uGAEF,CAEA,OAAO,IAAI,CAACH,mBAAmB,CAAC,CAAEC,MAAO,CAACtE,EAAGC,EAAE,AAAC,EACjD,CAMAwE
,QAAQzE,CAAwB,CAAEC,CAAwB,CAAU,CACnE,OAAO,IAAI,CAACyE,SAAS,CAAC1E,EAAGC,EAC1B,CAKA0E,QAAQ3E,CAAwB,CAAEC,CAAwB,CAAW,CACpE,OAAO,IAAI,CAACyE,SAAS,CAAC1E,EAAGC,KAAO,CACjC,CAhHA,aAAc,CATd,sBAAiByE,YAAjB,KAAA,GAKA,sBAAiBL,sBAAjB,KAAA,GAKC,KAAM,CAAEO,wBAAwB,CAAEC,mBAAmB,CAAE,CACtDC,GAAAA,iCAAgB,IAOjB,MAAMC,wBAA0B,CAC/B/E,EACAC,KAEA,GAAID,IAAM,MAAQC,IAAM,KAAM,OAAO,EACrC,OAAO4E,oBAAoB7E,EAAGC,EAC/B,EAEA,KAAM,CAAE+E,6BAA6B,CAAE,CAAGC,GAAAA,6BAAY,EAAC,CACtDC,cAAeC,GAAAA,wBAAiB,EAACJ,yBACjCK,yBAA0BC,GAAAA,yBAAkB,EAACT,yBAC9C,EAEA,CAAA,IAAI,CAACF,SAAS,CAAGE,wBACjB,CAAA,IAAI,CAACP,mBAAmB,CAAGiB,GAAAA,wCAAuB,EACjDN,8BAEF,CAuFD"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports,"__esModule",{value:true});function _export(target,all){for(var name in all)Object.defineProperty(target,name,{enumerable:true,get:Object.getOwnPropertyDescriptor(all,name).get})}_export(exports,{get inferType(){return inferType},get normalize(){return normalize}});const _utils=require("./utils.js");const normalizeCache=new WeakMap;function inferType(value){if(value===null)return"null";switch(typeof value){case"string":return"string";case"number":return Number.isInteger(value)?"integer":"number";case"boolean":return"boolean";case"object":return Array.isArray(value)?"array":"object";default:return undefined}}const SINGLE_SCHEMA_KEYWORDS=["additionalProperties","additionalItems","contains","propertyNames","not","if","then","else"];const METADATA_KEYWORDS=new Set(["$id","$schema","$comment","title","description","default","examples","definitions","$defs"]);function isPureNotSchema(schema){const schemaKeys=Object.keys(schema);return schemaKeys.every(k=>k==="not"||METADATA_KEYWORDS.has(k))}const ARRAY_SCHEMA_KEYWORDS=["anyOf","oneOf","allOf"];const PROPERTIES_LIKE_KEYWORDS=["properties","patternProperties"];function normalizePropertiesMap(props){const keys=Object.keys(props);let changed=false;for(let i=0;i<keys.length;i++){const key=keys[i];if(key===undefined)continue;const original=props[key];const normalized=normalize(original);if(normalized!==original){changed=true;break}}if(!changed)return props;const result={};for(let i=0;i<keys.length;i++){const key=keys[i];if(key===undefined)continue;result[key]=normalize(props[key])}return result}function inferTypeFromConst(schema){if(!(0,_utils.hasOwn)(schema,"const")||schema.type!==undefined)return undefined;const t=inferType(schema.const);return t?t:undefined}function inferTypeFromEnum(schema){if(!Array.isArray(schema.enum)||schema.type!==undefined)return undefined;const typesSet=new Set;for(const v of schema.enum){const t=inferType(v);if(t)typesSet.add(t)}const 
count=typesSet.size;if(count===0)return undefined;const types=Array.from(typesSet);if(count===1)return types[0];return types}function normalize(def){if(typeof def==="boolean")return def;const cached=normalizeCache.get(def);if(cached!==undefined)return cached;let schema=def;let copied=false;function ensureCopy(){if(!copied){schema={...def};copied=true}return schema}const typeFromConst=inferTypeFromConst(schema);if(typeFromConst){ensureCopy().type=typeFromConst}const typeFromEnum=inferTypeFromEnum(schema);if(typeFromEnum){ensureCopy().type=typeFromEnum}if(Array.isArray(schema.enum)&&schema.enum.length===1&&!(0,_utils.hasOwn)(schema,"const")){const s=ensureCopy();s.const=schema.enum[0];delete s.enum}if((0,_utils.hasOwn)(schema,"const")&&Array.isArray(schema.enum)){if(schema.enum.some(v=>(0,_utils.deepEqual)(v,schema.const))){delete ensureCopy().enum}}for(const keyword of PROPERTIES_LIKE_KEYWORDS){const val=schema[keyword];if((0,_utils.isPlainObj)(val)){const normalized=normalizePropertiesMap(val);if(normalized!==val){ensureCopy()[keyword]=normalized}}}if((0,_utils.isPlainObj)(schema.dependencies)){const deps=schema.dependencies;const depsKeys=Object.keys(deps);let depsChanged=false;const newDeps={};for(let i=0;i<depsKeys.length;i++){const key=depsKeys[i];if(key===undefined)continue;const val=deps[key];if(val===undefined)continue;if(Array.isArray(val)){newDeps[key]=val}else if((0,_utils.isPlainObj)(val)){const normalized=normalize(val);newDeps[key]=normalized;if(normalized!==val)depsChanged=true}else{newDeps[key]=val}}if(depsChanged){ensureCopy().dependencies=newDeps}}if(schema.items){if(Array.isArray(schema.items)){const items=schema.items;let itemsChanged=false;const newItems=new Array(items.length);for(let i=0;i<items.length;i++){const original=items[i];if(original===undefined)continue;const normalized=normalize(original);newItems[i]=normalized;if(normalized!==original)itemsChanged=true}if(itemsChanged){ensureCopy().items=newItems}}else 
if((0,_utils.isPlainObj)(schema.items)){const normalized=normalize(schema.items);if(normalized!==schema.items){ensureCopy().items=normalized}}}for(const key of SINGLE_SCHEMA_KEYWORDS){const val=schema[key];if(val!==undefined&&typeof val!=="boolean"){const normalized=normalize(val);if(normalized!==val){ensureCopy()[key]=normalized}}}if((0,_utils.hasOwn)(schema,"not")&&(0,_utils.isPlainObj)(schema.not)&&typeof schema.not!=="boolean"){const notSchema=schema.not;if((0,_utils.hasOwn)(notSchema,"not")&&isPureNotSchema(notSchema)&&(0,_utils.isPlainObj)(notSchema.not)&&typeof notSchema.not!=="boolean"){const innerSchema=notSchema.not;const s=ensureCopy();delete s.not;const innerKeys=Object.keys(innerSchema);for(let i=0;i<innerKeys.length;i++){const ik=innerKeys[i];if(ik===undefined)continue;s[ik]=innerSchema[ik]}}}for(const key of ARRAY_SCHEMA_KEYWORDS){const val=schema[key];if(Array.isArray(val)){const arr=val;let arrChanged=false;const newArr=new Array(arr.length);for(let i=0;i<arr.length;i++){const original=arr[i];if(original===undefined)continue;const normalized=normalize(original);newArr[i]=normalized;if(normalized!==original)arrChanged=true}if(arrChanged){ensureCopy()[key]=newArr}}}const result=copied?schema:def;normalizeCache.set(def,result);return result}
|
|
2
|
+
//# sourceMappingURL=normalizer.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/normalizer.ts"],"sourcesContent":["import type { JSONSchema7, JSONSchema7Definition } from \"json-schema\";\nimport { deepEqual, hasOwn, isPlainObj } from \"./utils\";\n\n// ─── Schema Normalizer ───────────────────────────────────────────────────────\n//\n// Fonctions pures pour normaliser un JSON Schema :\n// - Inférer `type` depuis `const` ou `enum`\n// - Récurser dans toutes les sous-structures (properties, items, anyOf, etc.)\n// - Résoudre la double négation `not.not` → aplatir en contenu direct\n// - Récurser dans `patternProperties` (Point 2)\n// - Récurser dans `dependencies` forme schema (Point 3)\n//\n// Optimisations :\n// - WeakMap cache pour éviter de re-normaliser le même objet\n// - Lazy copy-on-write : ne crée une copie que si des mutations sont nécessaires\n// - Retourne l'original si rien n'a changé (évite les allocations)\n\n// ─── Normalization Cache ─────────────────────────────────────────────────────\n\n/**\n * Cache WeakMap pour les résultats de normalisation.\n * Évite de re-normaliser le même objet schema plusieurs fois.\n * WeakMap permet au GC de collecter les schemas qui ne sont plus référencés.\n */\nconst normalizeCache = new WeakMap<object, JSONSchema7Definition>();\n\n// ─── Type inference ──────────────────────────────────────────────────────────\n\n/**\n * Infère le type JSON Schema d'une valeur JavaScript.\n */\nexport function inferType(value: unknown): string | undefined {\n\tif (value === null) return \"null\";\n\tswitch (typeof value) {\n\t\tcase \"string\":\n\t\t\treturn \"string\";\n\t\tcase \"number\":\n\t\t\treturn Number.isInteger(value) ? \"integer\" : \"number\";\n\t\tcase \"boolean\":\n\t\t\treturn \"boolean\";\n\t\tcase \"object\":\n\t\t\treturn Array.isArray(value) ? 
\"array\" : \"object\";\n\t\tdefault:\n\t\t\treturn undefined;\n\t}\n}\n\n// ─── Sub-schema keywords ─────────────────────────────────────────────────────\n\n/** Mots-clés contenant un unique sous-schema */\nconst SINGLE_SCHEMA_KEYWORDS = [\n\t\"additionalProperties\",\n\t\"additionalItems\",\n\t\"contains\",\n\t\"propertyNames\",\n\t\"not\",\n\t\"if\",\n\t\"then\",\n\t\"else\",\n] as const;\n\n/**\n * Vérifie si un schema ne contient qu'un seul mot-clé `not` (et aucun\n * autre mot-clé significatif). Utilisé pour la résolution de double négation.\n *\n * Un schema « pur not » est de la forme `{ not: X }` sans aucune autre\n * contrainte. Dans ce cas, `{ not: { not: Y } }` ≡ `Y`.\n *\n * Les mots-clés de métadonnée (`$id`, `$schema`, `$comment`, `title`,\n * `description`, `default`, `examples`, `definitions`, `$defs`) ne sont\n * PAS considérés comme significatifs pour cette détection.\n */\nconst METADATA_KEYWORDS = new Set([\n\t\"$id\",\n\t\"$schema\",\n\t\"$comment\",\n\t\"title\",\n\t\"description\",\n\t\"default\",\n\t\"examples\",\n\t\"definitions\",\n\t\"$defs\",\n]);\n\n/**\n * Vérifie si un objet schema ne contient que le mot-clé `not`\n * (plus éventuellement des métadonnées non significatives).\n */\nfunction isPureNotSchema(schema: JSONSchema7): boolean {\n\tconst schemaKeys = Object.keys(schema);\n\treturn schemaKeys.every((k) => k === \"not\" || METADATA_KEYWORDS.has(k));\n}\n\n/** Mots-clés contenant un tableau de sous-schemas */\nconst ARRAY_SCHEMA_KEYWORDS = [\"anyOf\", \"oneOf\", \"allOf\"] as const;\n\n/**\n * Mots-clés contenant un Record<string, JSONSchema7Definition>\n * (chaque valeur est un sous-schema à normaliser récursivement).\n */\nconst PROPERTIES_LIKE_KEYWORDS = [\"properties\", \"patternProperties\"] as const;\n\n// ─── Internal helpers ────────────────────────────────────────────────────────\n\n/**\n * Normalise un `Record<string, JSONSchema7Definition>` en appliquant\n * `normalize` à chaque valeur.\n * Retourne l'objet original 
si rien n'a changé (évite les allocations).\n */\nfunction normalizePropertiesMap(\n\tprops: Record<string, JSONSchema7Definition>,\n): Record<string, JSONSchema7Definition> {\n\tconst keys = Object.keys(props);\n\tlet changed = false;\n\n\t// First pass: detect if anything changes (sub-schemas get cached)\n\tfor (let i = 0; i < keys.length; i++) {\n\t\tconst key = keys[i];\n\t\tif (key === undefined) continue;\n\t\tconst original = props[key];\n\t\tconst normalized = normalize(original as JSONSchema7Definition);\n\t\tif (normalized !== original) {\n\t\t\tchanged = true;\n\t\t\tbreak;\n\t\t}\n\t}\n\n\tif (!changed) return props;\n\n\t// Build result only when something changed (sub normalize calls hit cache)\n\tconst result: Record<string, JSONSchema7Definition> = {};\n\tfor (let i = 0; i < keys.length; i++) {\n\t\tconst key = keys[i];\n\t\tif (key === undefined) continue;\n\t\tresult[key] = normalize(props[key] as JSONSchema7Definition);\n\t}\n\n\treturn result;\n}\n\n/**\n * Infère le `type` depuis `const` si absent.\n * Retourne le type inféré ou undefined si non applicable.\n */\nfunction inferTypeFromConst(\n\tschema: JSONSchema7,\n): JSONSchema7[\"type\"] | undefined {\n\tif (!hasOwn(schema, \"const\") || schema.type !== undefined) return undefined;\n\tconst t = inferType(schema.const);\n\treturn t ? 
(t as JSONSchema7[\"type\"]) : undefined;\n}\n\n/**\n * Infère le `type` depuis `enum` si absent.\n * Retourne le type inféré (single ou array) ou undefined si non applicable.\n */\nfunction inferTypeFromEnum(\n\tschema: JSONSchema7,\n): JSONSchema7[\"type\"] | undefined {\n\tif (!Array.isArray(schema.enum) || schema.type !== undefined)\n\t\treturn undefined;\n\n\tconst typesSet = new Set<string>();\n\tfor (const v of schema.enum) {\n\t\tconst t = inferType(v);\n\t\tif (t) typesSet.add(t);\n\t}\n\n\tconst count = typesSet.size;\n\tif (count === 0) return undefined;\n\n\tconst types = Array.from(typesSet);\n\tif (count === 1) return types[0] as JSONSchema7[\"type\"];\n\treturn types as JSONSchema7[\"type\"];\n}\n\n// ─── Normalization ───────────────────────────────────────────────────────────\n\n/**\n * Normalise un schema : infère `type` depuis `const`/`enum`,\n * et normalise récursivement tous les sous-schemas.\n *\n * Récurse dans :\n * - `properties` et `patternProperties` (Point 2)\n * - `dependencies` forme schema (Point 3) — les valeurs tableau (forme 1)\n * sont laissées intactes\n * - `items` (single ou tuple)\n * - Mots-clés single-schema (`additionalProperties`, `not`, `if`, etc.)\n * - Mots-clés array-of-schema (`anyOf`, `oneOf`, `allOf`)\n *\n * Optimisations :\n * - WeakMap cache : retourne le résultat mis en cache en O(1)\n * - Lazy copy-on-write : ne crée une copie shallow que quand la première\n * mutation est nécessaire, via `ensureCopy()`\n * - Les sous-structures ne sont remplacées que si effectivement changées\n */\nexport function normalize(def: JSONSchema7Definition): JSONSchema7Definition {\n\tif (typeof def === \"boolean\") return def;\n\n\t// ── Cache lookup (O(1) fast path) ──\n\tconst cached = normalizeCache.get(def);\n\tif (cached !== undefined) return cached;\n\n\t// ── Lazy copy-on-write ──\n\t// We delay creating a shallow copy until the first actual mutation.\n\t// `schema` starts as `def` and only becomes a copy when 
`ensureCopy()` is called.\n\tlet schema = def as JSONSchema7 & Record<string, unknown>;\n\tlet copied = false;\n\n\tfunction ensureCopy(): JSONSchema7 & Record<string, unknown> {\n\t\tif (!copied) {\n\t\t\tschema = { ...(def as JSONSchema7) } as JSONSchema7 &\n\t\t\t\tRecord<string, unknown>;\n\t\t\tcopied = true;\n\t\t}\n\t\treturn schema;\n\t}\n\n\t// ── Inférer type depuis const ──\n\tconst typeFromConst = inferTypeFromConst(schema);\n\tif (typeFromConst) {\n\t\tensureCopy().type = typeFromConst;\n\t}\n\n\t// ── Inférer type depuis enum ──\n\tconst typeFromEnum = inferTypeFromEnum(schema);\n\tif (typeFromEnum) {\n\t\tensureCopy().type = typeFromEnum;\n\t}\n\n\t// ── Convertir enum à un seul élément en const ──\n\t// Sémantiquement, { enum: [X] } ≡ { const: X }.\n\t// Cette normalisation garantit que la comparaison structurelle\n\t// (isEqual) ne produit pas de faux négatifs quand un schema utilise\n\t// enum et l'autre utilise const pour la même valeur.\n\tif (\n\t\tArray.isArray(schema.enum) &&\n\t\tschema.enum.length === 1 &&\n\t\t!hasOwn(schema, \"const\")\n\t) {\n\t\tconst s = ensureCopy();\n\t\ts.const = schema.enum[0];\n\t\tdelete s.enum;\n\t}\n\n\t// ── Strip redundant enum when const is present ──\n\t// Si `const: X` et `enum: [... X ...]` coexistent, `const` est plus\n\t// restrictif → `enum` est redondant. 
Le merge engine peut produire\n\t// cette combinaison lors de l'intersection const ∩ enum.\n\tif (hasOwn(schema, \"const\") && Array.isArray(schema.enum)) {\n\t\tif (schema.enum.some((v) => deepEqual(v, schema.const))) {\n\t\t\tdelete ensureCopy().enum;\n\t\t}\n\t}\n\n\t// ── Récurser dans properties & patternProperties (Point 2) ──\n\tfor (const keyword of PROPERTIES_LIKE_KEYWORDS) {\n\t\tconst val = schema[keyword];\n\t\tif (isPlainObj(val)) {\n\t\t\tconst normalized = normalizePropertiesMap(\n\t\t\t\tval as Record<string, JSONSchema7Definition>,\n\t\t\t);\n\t\t\tif (normalized !== val) {\n\t\t\t\tensureCopy()[keyword] = normalized as JSONSchema7[\"properties\"];\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Récurser dans dependencies (Point 3) ──\n\t// `dependencies` peut contenir :\n\t// - Forme 1 (property deps) : { foo: [\"bar\", \"baz\"] } → tableau de strings, on skip\n\t// - Forme 2 (schema deps) : { foo: { required: [...] } } → objet schema, on normalise\n\tif (isPlainObj(schema.dependencies)) {\n\t\tconst deps = schema.dependencies as Record<\n\t\t\tstring,\n\t\t\tJSONSchema7Definition | string[]\n\t\t>;\n\t\tconst depsKeys = Object.keys(deps);\n\t\tlet depsChanged = false;\n\t\tconst newDeps: Record<string, JSONSchema7Definition | string[]> = {};\n\n\t\tfor (let i = 0; i < depsKeys.length; i++) {\n\t\t\tconst key = depsKeys[i];\n\t\t\tif (key === undefined) continue;\n\t\t\tconst val = deps[key];\n\t\t\tif (val === undefined) continue;\n\t\t\tif (Array.isArray(val)) {\n\t\t\t\t// Forme 1 : tableau de strings → laisser tel quel\n\t\t\t\tnewDeps[key] = val;\n\t\t\t} else if (isPlainObj(val)) {\n\t\t\t\t// Forme 2 : sous-schema → normaliser récursivement\n\t\t\t\tconst normalized = normalize(val as JSONSchema7Definition);\n\t\t\t\tnewDeps[key] = normalized;\n\t\t\t\tif (normalized !== val) depsChanged = true;\n\t\t\t} else {\n\t\t\t\tnewDeps[key] = val as JSONSchema7Definition;\n\t\t\t}\n\t\t}\n\n\t\tif (depsChanged) {\n\t\t\tensureCopy().dependencies = 
newDeps;\n\t\t}\n\t}\n\n\t// ── Récurser dans items (tuple ou single) ──\n\tif (schema.items) {\n\t\tif (Array.isArray(schema.items)) {\n\t\t\t// Tuple : normaliser chaque élément\n\t\t\tconst items = schema.items as JSONSchema7Definition[];\n\t\t\tlet itemsChanged = false;\n\t\t\tconst newItems: JSONSchema7Definition[] = new Array(items.length);\n\n\t\t\tfor (let i = 0; i < items.length; i++) {\n\t\t\t\tconst original = items[i];\n\t\t\t\tif (original === undefined) continue;\n\t\t\t\tconst normalized = normalize(original);\n\t\t\t\tnewItems[i] = normalized;\n\t\t\t\tif (normalized !== original) itemsChanged = true;\n\t\t\t}\n\n\t\t\tif (itemsChanged) {\n\t\t\t\tensureCopy().items = newItems;\n\t\t\t}\n\t\t} else if (isPlainObj(schema.items)) {\n\t\t\t// Single items schema\n\t\t\tconst normalized = normalize(schema.items as JSONSchema7Definition);\n\t\t\tif (normalized !== schema.items) {\n\t\t\t\tensureCopy().items = normalized;\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Récurser dans les mots-clés single-schema ──\n\tfor (const key of SINGLE_SCHEMA_KEYWORDS) {\n\t\tconst val = schema[key];\n\t\tif (val !== undefined && typeof val !== \"boolean\") {\n\t\t\tconst normalized = normalize(val as JSONSchema7Definition);\n\t\t\tif (normalized !== val) {\n\t\t\t\t(ensureCopy() as Record<string, JSONSchema7Definition>)[key] =\n\t\t\t\t\tnormalized;\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Résoudre la double négation not(not(X)) → X ──\n\t// Après la récursion dans les sous-schemas, `schema.not` est normalisé.\n\t// Si `schema.not` est un objet qui ne contient QUE `not` (un « pur not »),\n\t// alors `{ ...rest, not: { not: X } }` ≡ `{ ...rest, ...X }`.\n\t//\n\t// Logique propositionnelle : ¬¬P ≡ P\n\t//\n\t// On ne résout que le cas « pur » (schema.not n'a que `not` comme clé\n\t// significative) pour éviter les faux-positifs dans les cas complexes.\n\tif (\n\t\thasOwn(schema, \"not\") &&\n\t\tisPlainObj(schema.not) &&\n\t\ttypeof schema.not !== \"boolean\"\n\t) {\n\t\tconst notSchema = 
schema.not as JSONSchema7;\n\t\tif (\n\t\t\thasOwn(notSchema, \"not\") &&\n\t\t\tisPureNotSchema(notSchema) &&\n\t\t\tisPlainObj(notSchema.not) &&\n\t\t\ttypeof notSchema.not !== \"boolean\"\n\t\t) {\n\t\t\t// Extraire le contenu de not.not et le fusionner avec le reste du schema\n\t\t\tconst innerSchema = notSchema.not as JSONSchema7;\n\t\t\tconst s = ensureCopy();\n\t\t\t// Retirer `not` du schema courant\n\t\t\tdelete s.not;\n\t\t\t// Fusionner le contenu interne dans le schema courant\n\t\t\tconst innerKeys = Object.keys(innerSchema);\n\t\t\tfor (let i = 0; i < innerKeys.length; i++) {\n\t\t\t\tconst ik = innerKeys[i];\n\t\t\t\tif (ik === undefined) continue;\n\t\t\t\t(s as Record<string, unknown>)[ik] = (\n\t\t\t\t\tinnerSchema as Record<string, unknown>\n\t\t\t\t)[ik];\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Récurser dans les mots-clés array-of-schema ──\n\tfor (const key of ARRAY_SCHEMA_KEYWORDS) {\n\t\tconst val = schema[key];\n\t\tif (Array.isArray(val)) {\n\t\t\tconst arr = val as JSONSchema7Definition[];\n\t\t\tlet arrChanged = false;\n\t\t\tconst newArr: JSONSchema7Definition[] = new Array(arr.length);\n\n\t\t\tfor (let i = 0; i < arr.length; i++) {\n\t\t\t\tconst original = arr[i];\n\t\t\t\tif (original === undefined) continue;\n\t\t\t\tconst normalized = normalize(original);\n\t\t\t\tnewArr[i] = normalized;\n\t\t\t\tif (normalized !== original) arrChanged = true;\n\t\t\t}\n\n\t\t\tif (arrChanged) {\n\t\t\t\tensureCopy()[key] = newArr;\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Determine result ──\n\t// If nothing changed (copied === false), return the original def.\n\t// Otherwise, return the mutated copy.\n\tconst result = (copied ? 
schema : def) as JSONSchema7Definition;\n\n\t// ── Cache the result ──\n\tnormalizeCache.set(def, result);\n\n\treturn result;\n}\n"],"names":["inferType","normalize","normalizeCache","WeakMap","value","Number","isInteger","Array","isArray","undefined","SINGLE_SCHEMA_KEYWORDS","METADATA_KEYWORDS","Set","isPureNotSchema","schema","schemaKeys","Object","keys","every","k","has","ARRAY_SCHEMA_KEYWORDS","PROPERTIES_LIKE_KEYWORDS","normalizePropertiesMap","props","changed","i","length","key","original","normalized","result","inferTypeFromConst","hasOwn","type","t","const","inferTypeFromEnum","enum","typesSet","v","add","count","size","types","from","def","cached","get","copied","ensureCopy","typeFromConst","typeFromEnum","s","some","deepEqual","keyword","val","isPlainObj","dependencies","deps","depsKeys","depsChanged","newDeps","items","itemsChanged","newItems","not","notSchema","innerSchema","innerKeys","ik","arr","arrChanged","newArr","set"],"mappings":"mPA+BgBA,mBAAAA,eAqKAC,mBAAAA,kCAnM8B,WAuB9C,MAAMC,eAAiB,IAAIC,QAOpB,SAASH,UAAUI,KAAc,EACvC,GAAIA,QAAU,KAAM,MAAO,OAC3B,OAAQ,OAAOA,OACd,IAAK,SACJ,MAAO,QACR,KAAK,SACJ,OAAOC,OAAOC,SAAS,CAACF,OAAS,UAAY,QAC9C,KAAK,UACJ,MAAO,SACR,KAAK,SACJ,OAAOG,MAAMC,OAAO,CAACJ,OAAS,QAAU,QACzC,SACC,OAAOK,SACT,CACD,CAKA,MAAMC,uBAAyB,CAC9B,uBACA,kBACA,WACA,gBACA,MACA,KACA,OACA,OACA,CAaD,MAAMC,kBAAoB,IAAIC,IAAI,CACjC,MACA,UACA,WACA,QACA,cACA,UACA,WACA,cACA,QACA,EAMD,SAASC,gBAAgBC,MAAmB,EAC3C,MAAMC,WAAaC,OAAOC,IAAI,CAACH,QAC/B,OAAOC,WAAWG,KAAK,CAAC,AAACC,GAAMA,IAAM,OAASR,kBAAkBS,GAAG,CAACD,GACrE,CAGA,MAAME,sBAAwB,CAAC,QAAS,QAAS,QAAQ,CAMzD,MAAMC,yBAA2B,CAAC,aAAc,oBAAoB,CASpE,SAASC,uBACRC,KAA4C,EAE5C,MAAMP,KAAOD,OAAOC,IAAI,CAACO,OACzB,IAAIC,QAAU,MAGd,IAAK,IAAIC,EAAI,EAAGA,EAAIT,KAAKU,MAAM,CAAED,IAAK,CACrC,MAAME,IAAMX,IAAI,CAACS,EAAE,CACnB,GAAIE,MAAQnB,UAAW,SACvB,MAAMoB,SAAWL,KAAK,CAACI,IAAI,CAC3B,MAAME,WAAa7B,UAAU4B,UAC7B,GAAIC,aAAeD,SAAU,CAC5BJ,QAAU,KACV,KACD,CACD,CAEA,GAAI,CAACA,QAAS,OAAOD,MAGrB,MAAMO,OAAgD,CAAC,EACvD,IAAK,IAAIL,EAAI,EAAGA,EAAIT,
KAAKU,MAAM,CAAED,IAAK,CACrC,MAAME,IAAMX,IAAI,CAACS,EAAE,CACnB,GAAIE,MAAQnB,UAAW,QACvBsB,CAAAA,MAAM,CAACH,IAAI,CAAG3B,UAAUuB,KAAK,CAACI,IAAI,CACnC,CAEA,OAAOG,MACR,CAMA,SAASC,mBACRlB,MAAmB,EAEnB,GAAI,CAACmB,GAAAA,aAAM,EAACnB,OAAQ,UAAYA,OAAOoB,IAAI,GAAKzB,UAAW,OAAOA,UAClE,MAAM0B,EAAInC,UAAUc,OAAOsB,KAAK,EAChC,OAAOD,EAAKA,EAA4B1B,SACzC,CAMA,SAAS4B,kBACRvB,MAAmB,EAEnB,GAAI,CAACP,MAAMC,OAAO,CAACM,OAAOwB,IAAI,GAAKxB,OAAOoB,IAAI,GAAKzB,UAClD,OAAOA,UAER,MAAM8B,SAAW,IAAI3B,IACrB,IAAK,MAAM4B,KAAK1B,OAAOwB,IAAI,CAAE,CAC5B,MAAMH,EAAInC,UAAUwC,GACpB,GAAIL,EAAGI,SAASE,GAAG,CAACN,EACrB,CAEA,MAAMO,MAAQH,SAASI,IAAI,CAC3B,GAAID,QAAU,EAAG,OAAOjC,UAExB,MAAMmC,MAAQrC,MAAMsC,IAAI,CAACN,UACzB,GAAIG,QAAU,EAAG,OAAOE,KAAK,CAAC,EAAE,CAChC,OAAOA,KACR,CAsBO,SAAS3C,UAAU6C,GAA0B,EACnD,GAAI,OAAOA,MAAQ,UAAW,OAAOA,IAGrC,MAAMC,OAAS7C,eAAe8C,GAAG,CAACF,KAClC,GAAIC,SAAWtC,UAAW,OAAOsC,OAKjC,IAAIjC,OAASgC,IACb,IAAIG,OAAS,MAEb,SAASC,aACR,GAAI,CAACD,OAAQ,CACZnC,OAAS,CAAE,GAAIgC,GAAG,AAAiB,EAEnCG,OAAS,IACV,CACA,OAAOnC,MACR,CAGA,MAAMqC,cAAgBnB,mBAAmBlB,QACzC,GAAIqC,cAAe,CAClBD,aAAahB,IAAI,CAAGiB,aACrB,CAGA,MAAMC,aAAef,kBAAkBvB,QACvC,GAAIsC,aAAc,CACjBF,aAAahB,IAAI,CAAGkB,YACrB,CAOA,GACC7C,MAAMC,OAAO,CAACM,OAAOwB,IAAI,GACzBxB,OAAOwB,IAAI,CAACX,MAAM,GAAK,GACvB,CAACM,GAAAA,aAAM,EAACnB,OAAQ,SACf,CACD,MAAMuC,EAAIH,YACVG,CAAAA,EAAEjB,KAAK,CAAGtB,OAAOwB,IAAI,CAAC,EAAE,AACxB,QAAOe,EAAEf,IAAI,AACd,CAMA,GAAIL,GAAAA,aAAM,EAACnB,OAAQ,UAAYP,MAAMC,OAAO,CAACM,OAAOwB,IAAI,EAAG,CAC1D,GAAIxB,OAAOwB,IAAI,CAACgB,IAAI,CAAC,AAACd,GAAMe,GAAAA,gBAAS,EAACf,EAAG1B,OAAOsB,KAAK,GAAI,CACxD,OAAOc,aAAaZ,IAAI,AACzB,CACD,CAGA,IAAK,MAAMkB,WAAWlC,yBAA0B,CAC/C,MAAMmC,IAAM3C,MAAM,CAAC0C,QAAQ,CAC3B,GAAIE,GAAAA,iBAAU,EAACD,KAAM,CACpB,MAAM3B,WAAaP,uBAClBkC,KAED,GAAI3B,aAAe2B,IAAK,CACvBP,YAAY,CAACM,QAAQ,CAAG1B,UACzB,CACD,CACD,CAMA,GAAI4B,GAAAA,iBAAU,EAAC5C,OAAO6C,YAAY,EAAG,CACpC,MAAMC,KAAO9C,OAAO6C,YAAY,CAIhC,MAAME,SAAW7C,OAAOC,IAAI,CAAC2C,MAC7B,IAAIE,YAAc,MAClB,MAAMC,QAA4D,CAAC,EAEnE,IAAK,IAAIrC,EAAI,EAAGA,EAAImC,SAASlC,MAAM,CAAED,IAAK,CACzC,MAAME,I
AAMiC,QAAQ,CAACnC,EAAE,CACvB,GAAIE,MAAQnB,UAAW,SACvB,MAAMgD,IAAMG,IAAI,CAAChC,IAAI,CACrB,GAAI6B,MAAQhD,UAAW,SACvB,GAAIF,MAAMC,OAAO,CAACiD,KAAM,CAEvBM,OAAO,CAACnC,IAAI,CAAG6B,GAChB,MAAO,GAAIC,GAAAA,iBAAU,EAACD,KAAM,CAE3B,MAAM3B,WAAa7B,UAAUwD,IAC7BM,CAAAA,OAAO,CAACnC,IAAI,CAAGE,WACf,GAAIA,aAAe2B,IAAKK,YAAc,IACvC,KAAO,CACNC,OAAO,CAACnC,IAAI,CAAG6B,GAChB,CACD,CAEA,GAAIK,YAAa,CAChBZ,aAAaS,YAAY,CAAGI,OAC7B,CACD,CAGA,GAAIjD,OAAOkD,KAAK,CAAE,CACjB,GAAIzD,MAAMC,OAAO,CAACM,OAAOkD,KAAK,EAAG,CAEhC,MAAMA,MAAQlD,OAAOkD,KAAK,CAC1B,IAAIC,aAAe,MACnB,MAAMC,SAAoC,IAAI3D,MAAMyD,MAAMrC,MAAM,EAEhE,IAAK,IAAID,EAAI,EAAGA,EAAIsC,MAAMrC,MAAM,CAAED,IAAK,CACtC,MAAMG,SAAWmC,KAAK,CAACtC,EAAE,CACzB,GAAIG,WAAapB,UAAW,SAC5B,MAAMqB,WAAa7B,UAAU4B,SAC7BqC,CAAAA,QAAQ,CAACxC,EAAE,CAAGI,WACd,GAAIA,aAAeD,SAAUoC,aAAe,IAC7C,CAEA,GAAIA,aAAc,CACjBf,aAAac,KAAK,CAAGE,QACtB,CACD,MAAO,GAAIR,GAAAA,iBAAU,EAAC5C,OAAOkD,KAAK,EAAG,CAEpC,MAAMlC,WAAa7B,UAAUa,OAAOkD,KAAK,EACzC,GAAIlC,aAAehB,OAAOkD,KAAK,CAAE,CAChCd,aAAac,KAAK,CAAGlC,UACtB,CACD,CACD,CAGA,IAAK,MAAMF,OAAOlB,uBAAwB,CACzC,MAAM+C,IAAM3C,MAAM,CAACc,IAAI,CACvB,GAAI6B,MAAQhD,WAAa,OAAOgD,MAAQ,UAAW,CAClD,MAAM3B,WAAa7B,UAAUwD,KAC7B,GAAI3B,aAAe2B,IAAK,CACvB,AAACP,YAAsD,CAACtB,IAAI,CAC3DE,UACF,CACD,CACD,CAWA,GACCG,GAAAA,aAAM,EAACnB,OAAQ,QACf4C,GAAAA,iBAAU,EAAC5C,OAAOqD,GAAG,GACrB,OAAOrD,OAAOqD,GAAG,GAAK,UACrB,CACD,MAAMC,UAAYtD,OAAOqD,GAAG,CAC5B,GACClC,GAAAA,aAAM,EAACmC,UAAW,QAClBvD,gBAAgBuD,YAChBV,GAAAA,iBAAU,EAACU,UAAUD,GAAG,GACxB,OAAOC,UAAUD,GAAG,GAAK,UACxB,CAED,MAAME,YAAcD,UAAUD,GAAG,CACjC,MAAMd,EAAIH,YAEV,QAAOG,EAAEc,GAAG,CAEZ,MAAMG,UAAYtD,OAAOC,IAAI,CAACoD,aAC9B,IAAK,IAAI3C,EAAI,EAAGA,EAAI4C,UAAU3C,MAAM,CAAED,IAAK,CAC1C,MAAM6C,GAAKD,SAAS,CAAC5C,EAAE,CACvB,GAAI6C,KAAO9D,UAAW,QACtB,CAAC4C,CAA6B,CAACkB,GAAG,CAAG,AACpCF,WACA,CAACE,GAAG,AACN,CACD,CACD,CAGA,IAAK,MAAM3C,OAAOP,sBAAuB,CACxC,MAAMoC,IAAM3C,MAAM,CAACc,IAAI,CACvB,GAAIrB,MAAMC,OAAO,CAACiD,KAAM,CACvB,MAAMe,IAAMf,IACZ,IAAIgB,WAAa,MACjB,MAAMC,OAAkC,IAAInE,MAAMiE,IAAI7C,MAAM,EAE5D,IAAK,IAAID,EAAI,EAAGA,EAAI8C,IAAI7
C,MAAM,CAAED,IAAK,CACpC,MAAMG,SAAW2C,GAAG,CAAC9C,EAAE,CACvB,GAAIG,WAAapB,UAAW,SAC5B,MAAMqB,WAAa7B,UAAU4B,SAC7B6C,CAAAA,MAAM,CAAChD,EAAE,CAAGI,WACZ,GAAIA,aAAeD,SAAU4C,WAAa,IAC3C,CAEA,GAAIA,WAAY,CACfvB,YAAY,CAACtB,IAAI,CAAG8C,MACrB,CACD,CACD,CAKA,MAAM3C,OAAUkB,OAASnC,OAASgC,IAGlC5C,eAAeyE,GAAG,CAAC7B,IAAKf,QAExB,OAAOA,MACR"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports,"__esModule",{value:true});function _export(target,all){for(var name in all)Object.defineProperty(target,name,{enumerable:true,get:Object.getOwnPropertyDescriptor(all,name).get})}_export(exports,{get arePatternsEquivalent(){return arePatternsEquivalent},get clearPatternCaches(){return clearPatternCaches},get isPatternSubset(){return isPatternSubset},get isTrivialPattern(){return isTrivialPattern}});const _randexp=/*#__PURE__*/_interop_require_default(require("randexp"));function _interop_require_default(obj){return obj&&obj.__esModule?obj:{default:obj}}const DEFAULT_SAMPLE_COUNT=200;const MAX_GENERATED_LENGTH=100;const MAX_REPETITION=20;const subsetCache=new Map;const regexCache=new Map;const generatorCache=new Map;function clearPatternCaches(){subsetCache.clear();regexCache.clear();generatorCache.clear()}const UNIVERSAL_PATTERNS=new Set([".*",".+","^.*$","^.+$","^.*",".*$","^.+",".+$","(?:.*)","(?:.+)"]);const _ANCHORED_UNIVERSAL_REGEX=/^\^?\(?:\.\*\)?\$?$|^\^?\.\*\$?$|^\^?\.\+\$?$/;function createGenerator(pattern){const cached=generatorCache.get(pattern);if(cached!==undefined)return cached;try{const randexp=new _randexp.default(pattern);randexp.max=MAX_REPETITION;generatorCache.set(pattern,randexp);return randexp}catch{generatorCache.set(pattern,null);return null}}function compileRegex(pattern){const cached=regexCache.get(pattern);if(cached!==undefined)return cached;try{const regex=new RegExp(pattern);regexCache.set(pattern,regex);return regex}catch{regexCache.set(pattern,null);return null}}function isUniversalSuperset(pattern){const trimmed=pattern.trim();if(trimmed===""||UNIVERSAL_PATTERNS.has(trimmed))return true;if(trimmed==="^(.*)$"||trimmed==="^(.+)$")return true;return false}function quickSubsetCheck(_subPattern,supPattern){if(isUniversalSuperset(supPattern))return true;return null}function isPatternSubset(subPattern,supPattern,sampleCount=DEFAULT_SAMPLE_COUNT){if(subPattern===supPattern)return true;const 
cacheKey=`${subPattern}\0${supPattern}\0${sampleCount}`;const cached=subsetCache.get(cacheKey);if(cached!==undefined)return cached;const quick=quickSubsetCheck(subPattern,supPattern);if(quick!==null){subsetCache.set(cacheKey,quick);return quick}const supRegex=compileRegex(supPattern);if(supRegex===null){subsetCache.set(cacheKey,null);return null}const generator=createGenerator(subPattern);if(generator===null){subsetCache.set(cacheKey,null);return null}const seen=new Set;let validSamples=0;let attempts=0;const maxAttempts=sampleCount*3;while(validSamples<sampleCount&&attempts<maxAttempts){attempts++;const sample=generator.gen();if(typeof sample!=="string"||sample.length>MAX_GENERATED_LENGTH){continue}if(seen.has(sample)){continue}seen.add(sample);validSamples++;if(!supRegex.test(sample)){subsetCache.set(cacheKey,false);return false}}if(validSamples===0){subsetCache.set(cacheKey,null);return null}subsetCache.set(cacheKey,true);return true}function arePatternsEquivalent(patternA,patternB,sampleCount=DEFAULT_SAMPLE_COUNT){if(patternA===patternB)return true;const aSubB=isPatternSubset(patternA,patternB,sampleCount);if(aSubB===null)return null;if(aSubB===false)return false;const bSubA=isPatternSubset(patternB,patternA,sampleCount);if(bSubA===null)return null;return bSubA}function isTrivialPattern(pattern){const trimmed=pattern.trim();if(trimmed==="")return true;return UNIVERSAL_PATTERNS.has(trimmed)}
|
|
2
|
+
//# sourceMappingURL=pattern-subset.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/pattern-subset.ts"],"sourcesContent":["import RandExp from \"randexp\";\n\n// ─── Pattern Subset Checker ──────────────────────────────────────────────────\n//\n// Vérifie si un pattern regex est un sous-ensemble d'un autre pattern regex\n// via une approche par échantillonnage (sampling).\n//\n// Principe :\n// L(A) ⊆ L(B) ⟺ ∀s ∈ L(A), s ∈ L(B)\n//\n// On ne peut pas prouver ça formellement pour des regex ECMA-262 arbitraires\n// (le problème est PSPACE-complet pour les regex pures, et indécidable avec\n// les extensions comme les backreferences).\n//\n// Approche pragmatique :\n// 1. Générer N strings aléatoires matchant le pattern sub (via `randexp`)\n// 2. Vérifier que CHAQUE string matche aussi le pattern sup\n// 3. Si toutes matchent → retourner `true` (confiance haute)\n// 4. Si au moins une ne matche pas → retourner `false` (certain)\n// 5. Si la génération échoue → retourner `null` (indéterminé)\n//\n// Limites :\n// - Faux positifs possibles (mais très improbables avec N suffisant)\n// - Ne gère pas les regex avec backreferences complexes\n// - `randexp` peut générer des strings biaisées (pas uniformément distribuées)\n//\n// Pour mitiger les faux positifs, on utilise :\n// - Un nombre d'échantillons élevé (200 par défaut)\n// - Plusieurs seeds pour diversifier la génération\n// - Un fallback `null` en cas de doute\n\n// ─── Configuration ───────────────────────────────────────────────────────────\n\n/** Nombre d'échantillons générés par défaut */\nconst DEFAULT_SAMPLE_COUNT = 200;\n\n/** Longueur maximale des strings générées par randexp */\nconst MAX_GENERATED_LENGTH = 100;\n\n/** Nombre maximal de répétitions pour les quantificateurs unbounded (*, +, {n,}) */\nconst MAX_REPETITION = 20;\n\n// ─── Result Cache ────────────────────────────────────────────────────────────\n\n/**\n * Cache des résultats de isPatternSubset pour éviter de recalculer\n * les mêmes comparaisons. 
La clé est `${subPattern}\\0${supPattern}\\0${sampleCount}`.\n */\nconst subsetCache = new Map<string, boolean | null>();\n\n/**\n * Cache des résultats de compilation RegExp pour éviter les recompilations.\n */\nconst regexCache = new Map<string, RegExp | null>();\n\n/**\n * Cache des générateurs RandExp pour éviter les re-créations.\n */\nconst generatorCache = new Map<string, RandExp | null>();\n\n/**\n * Vide les caches internes. Utile pour les tests ou la gestion mémoire.\n */\nexport function clearPatternCaches(): void {\n\tsubsetCache.clear();\n\tregexCache.clear();\n\tgeneratorCache.clear();\n}\n\n// ─── Trivial pattern detection (module-level constants) ──────────────────────\n\n/**\n * Patterns universels connus — matchent toute string (ou presque).\n * Défini au niveau du module pour éviter de recréer le Set à chaque appel.\n */\nconst UNIVERSAL_PATTERNS: ReadonlySet<string> = new Set([\n\t\".*\",\n\t\".+\",\n\t\"^.*$\",\n\t\"^.+$\",\n\t\"^.*\",\n\t\".*$\",\n\t\"^.+\",\n\t\".+$\",\n\t\"(?:.*)\",\n\t\"(?:.+)\",\n]);\n\n/**\n * Patterns anchored universal — variantes fréquentes qui matchent tout.\n * Utilisé pour la détection rapide de superset universels.\n */\nconst _ANCHORED_UNIVERSAL_REGEX =\n\t/^\\^?\\(?:\\.\\*\\)?\\$?$|^\\^?\\.\\*\\$?$|^\\^?\\.\\+\\$?$/;\n\n// ─── Internal helpers ────────────────────────────────────────────────────────\n\n/**\n * Crée un générateur RandExp configuré pour un pattern donné (avec cache).\n *\n * @param pattern Le pattern regex source\n * @returns L'instance RandExp configurée, ou null si le pattern est invalide\n */\nfunction createGenerator(pattern: string): RandExp | null {\n\tconst cached = generatorCache.get(pattern);\n\tif (cached !== undefined) return cached;\n\n\ttry {\n\t\tconst randexp = new RandExp(pattern);\n\t\trandexp.max = MAX_REPETITION;\n\t\tgeneratorCache.set(pattern, randexp);\n\t\treturn randexp;\n\t} catch {\n\t\tgeneratorCache.set(pattern, null);\n\t\treturn null;\n\t}\n}\n\n/**\n * Compile un 
pattern en RegExp avec gestion d'erreur et cache.\n *\n * @param pattern Le pattern regex à compiler\n * @returns L'objet RegExp compilé, ou null si invalide\n */\nfunction compileRegex(pattern: string): RegExp | null {\n\tconst cached = regexCache.get(pattern);\n\tif (cached !== undefined) return cached;\n\n\ttry {\n\t\tconst regex = new RegExp(pattern);\n\t\tregexCache.set(pattern, regex);\n\t\treturn regex;\n\t} catch {\n\t\tregexCache.set(pattern, null);\n\t\treturn null;\n\t}\n}\n\n/**\n * Vérifie si un pattern est un superset universel (matche tout).\n * Utilisé pour court-circuiter le sampling quand sup matche tout.\n */\nfunction isUniversalSuperset(pattern: string): boolean {\n\tconst trimmed = pattern.trim();\n\tif (trimmed === \"\" || UNIVERSAL_PATTERNS.has(trimmed)) return true;\n\n\t// Vérifier des variantes avec anchors optionnels\n\t// Ex: \"^(.*)$\", \"^(.+)$\" etc.\n\tif (trimmed === \"^(.*)$\" || trimmed === \"^(.+)$\") return true;\n\n\treturn false;\n}\n\n/**\n * Vérifie si subPattern est un sous-string littéral de supPattern\n * ou si le sup est clairement plus large (heuristiques rapides).\n * Retourne true si sub ⊆ sup est garanti, false sinon.\n */\nfunction quickSubsetCheck(\n\t_subPattern: string,\n\tsupPattern: string,\n): boolean | null {\n\t// Si sup est universel → tout est un sous-ensemble\n\tif (isUniversalSuperset(supPattern)) return true;\n\n\t// Si sub est un pattern littéral exact (anchored string sans métacaractères)\n\t// et que sup contient une classe de caractères ou quantificateur qui l'englobe,\n\t// on ne peut pas le déterminer facilement → null\n\treturn null;\n}\n\n// ─── Public API ──────────────────────────────────────────────────────────────\n\n/**\n * Vérifie si le langage du pattern `sub` est un sous-ensemble du langage\n * du pattern `sup` via échantillonnage.\n *\n * `sub ⊆ sup` signifie : toute string matchant `sub` matche aussi `sup`.\n *\n * Contrat ternaire :\n * - `true` → toutes les strings échantillonnées 
de sub matchent sup\n * (confiance haute, pas une preuve formelle)\n * - `false` → au moins une string de sub ne matche PAS sup\n * (certain — c'est un contre-exemple concret)\n * - `null` → impossible de déterminer (pattern invalide, génération échouée)\n *\n * @param subPattern Le pattern regex du schema sub\n * @param supPattern Le pattern regex du schema sup\n * @param sampleCount Nombre d'échantillons (défaut: 200)\n * @returns `true`, `false`, ou `null`\n *\n * @example\n * ```ts\n * isPatternSubset(\"^[a-z]{3}$\", \"^[a-z]+$\"); // true — 3 lettres ⊆ 1+ lettres\n * isPatternSubset(\"^[a-z]+$\", \"^[0-9]+$\"); // false — lettres ⊄ chiffres\n * isPatternSubset(\"^[a-z]+$\", \"^[a-z]{3}$\"); // false — \"ab\" matche sub mais pas sup\n * isPatternSubset(\"invalid[\", \"^[a-z]+$\"); // null — pattern invalide\n * ```\n */\nexport function isPatternSubset(\n\tsubPattern: string,\n\tsupPattern: string,\n\tsampleCount: number = DEFAULT_SAMPLE_COUNT,\n): boolean | null {\n\t// ── Identité : même pattern → toujours subset ──\n\tif (subPattern === supPattern) return true;\n\n\t// ── Cache lookup ──\n\tconst cacheKey = `${subPattern}\\0${supPattern}\\0${sampleCount}`;\n\tconst cached = subsetCache.get(cacheKey);\n\tif (cached !== undefined) return cached;\n\n\t// ── Quick checks avant le sampling ──\n\tconst quick = quickSubsetCheck(subPattern, supPattern);\n\tif (quick !== null) {\n\t\tsubsetCache.set(cacheKey, quick);\n\t\treturn quick;\n\t}\n\n\t// ── Compiler le pattern sup ──\n\tconst supRegex = compileRegex(supPattern);\n\tif (supRegex === null) {\n\t\tsubsetCache.set(cacheKey, null);\n\t\treturn null;\n\t}\n\n\t// ── Créer le générateur pour sub ──\n\tconst generator = createGenerator(subPattern);\n\tif (generator === null) {\n\t\tsubsetCache.set(cacheKey, null);\n\t\treturn null;\n\t}\n\n\t// ── Générer et vérifier les échantillons paresseusement ──\n\t// Au lieu de générer tous les échantillons d'abord puis les vérifier,\n\t// on génère et vérifie un par un 
pour permettre un arrêt précoce\n\t// dès qu'un contre-exemple est trouvé.\n\tconst seen = new Set<string>();\n\tlet validSamples = 0;\n\tlet attempts = 0;\n\tconst maxAttempts = sampleCount * 3; // Éviter les boucles infinies\n\n\twhile (validSamples < sampleCount && attempts < maxAttempts) {\n\t\tattempts++;\n\t\tconst sample = generator.gen();\n\n\t\t// Valider que l'échantillon est utilisable\n\t\tif (typeof sample !== \"string\" || sample.length > MAX_GENERATED_LENGTH) {\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Dédupliquer les échantillons\n\t\tif (seen.has(sample)) {\n\t\t\tcontinue;\n\t\t}\n\t\tseen.add(sample);\n\t\tvalidSamples++;\n\n\t\t// ── Vérification immédiate contre sup ──\n\t\t// Arrêt précoce dès qu'un contre-exemple est trouvé\n\t\tif (!supRegex.test(sample)) {\n\t\t\t// Contre-exemple trouvé → sub ⊄ sup (certain)\n\t\t\tsubsetCache.set(cacheKey, false);\n\t\t\treturn false;\n\t\t}\n\t}\n\n\t// Si aucun échantillon n'a pu être généré → indéterminé\n\tif (validSamples === 0) {\n\t\tsubsetCache.set(cacheKey, null);\n\t\treturn null;\n\t}\n\n\t// Tous les échantillons matchent → sub ⊆ sup (confiance haute)\n\tsubsetCache.set(cacheKey, true);\n\treturn true;\n}\n\n/**\n * Vérifie si deux patterns sont équivalents (acceptent le même langage)\n * via échantillonnage bidirectionnel.\n *\n * `A ≡ B` signifie : `A ⊆ B` ET `B ⊆ A`.\n *\n * @param patternA Premier pattern regex\n * @param patternB Second pattern regex\n * @param sampleCount Nombre d'échantillons par direction (défaut: 200)\n * @returns `true`, `false`, ou `null`\n */\nexport function arePatternsEquivalent(\n\tpatternA: string,\n\tpatternB: string,\n\tsampleCount: number = DEFAULT_SAMPLE_COUNT,\n): boolean | null {\n\tif (patternA === patternB) return true;\n\n\tconst aSubB = isPatternSubset(patternA, patternB, sampleCount);\n\tif (aSubB === null) return null;\n\tif (aSubB === false) return false;\n\n\tconst bSubA = isPatternSubset(patternB, patternA, sampleCount);\n\tif (bSubA === null) return 
null;\n\n\treturn bSubA;\n}\n\n/**\n * Vérifie si un pattern est \"trivially universal\" — i.e., il matche\n * toute string (ou presque). Utile pour détecter les patterns qui\n * n'ajoutent aucune contrainte réelle.\n *\n * Patterns détectés comme universels :\n * - `.*`\n * - `.+` (matche tout sauf la string vide)\n * - `^.*$`\n * - `^.+$`\n * - Patterns vides ou whitespace\n *\n * @param pattern Le pattern à vérifier\n * @returns `true` si le pattern est trivial/universel\n */\nexport function isTrivialPattern(pattern: string): boolean {\n\tconst trimmed = pattern.trim();\n\tif (trimmed === \"\") return true;\n\n\treturn UNIVERSAL_PATTERNS.has(trimmed);\n}\n"],"names":["arePatternsEquivalent","clearPatternCaches","isPatternSubset","isTrivialPattern","DEFAULT_SAMPLE_COUNT","MAX_GENERATED_LENGTH","MAX_REPETITION","subsetCache","Map","regexCache","generatorCache","clear","UNIVERSAL_PATTERNS","Set","_ANCHORED_UNIVERSAL_REGEX","createGenerator","pattern","cached","get","undefined","randexp","RandExp","max","set","compileRegex","regex","RegExp","isUniversalSuperset","trimmed","trim","has","quickSubsetCheck","_subPattern","supPattern","subPattern","sampleCount","cacheKey","quick","supRegex","generator","seen","validSamples","attempts","maxAttempts","sample","gen","length","add","test","patternA","patternB","aSubB","bSubA"],"mappings":"mPAiSgBA,+BAAAA,2BAlOAC,4BAAAA,wBAwIAC,yBAAAA,qBA0HAC,0BAAAA,iFAjUI,gGAkCpB,MAAMC,qBAAuB,IAG7B,MAAMC,qBAAuB,IAG7B,MAAMC,eAAiB,GAQvB,MAAMC,YAAc,IAAIC,IAKxB,MAAMC,WAAa,IAAID,IAKvB,MAAME,eAAiB,IAAIF,IAKpB,SAASP,qBACfM,YAAYI,KAAK,GACjBF,WAAWE,KAAK,GAChBD,eAAeC,KAAK,EACrB,CAQA,MAAMC,mBAA0C,IAAIC,IAAI,CACvD,KACA,KACA,OACA,OACA,MACA,MACA,MACA,MACA,SACA,SACA,EAMD,MAAMC,0BACL,gDAUD,SAASC,gBAAgBC,OAAe,EACvC,MAAMC,OAASP,eAAeQ,GAAG,CAACF,SAClC,GAAIC,SAAWE,UAAW,OAAOF,OAEjC,GAAI,CACH,MAAMG,QAAU,IAAIC,gBAAO,CAACL,QAC5BI,CAAAA,QAAQE,GAAG,CAAGhB,eACdI,eAAea,GAAG,CAACP,QAASI,SAC5B,OAAOA,OACR,CAAE,KAAM,CACPV,eAAea,GAAG,CAACP,QAAS,MAC5B,OAAO,IACR,CACD,CAQA,SAA
SQ,aAAaR,OAAe,EACpC,MAAMC,OAASR,WAAWS,GAAG,CAACF,SAC9B,GAAIC,SAAWE,UAAW,OAAOF,OAEjC,GAAI,CACH,MAAMQ,MAAQ,IAAIC,OAAOV,SACzBP,WAAWc,GAAG,CAACP,QAASS,OACxB,OAAOA,KACR,CAAE,KAAM,CACPhB,WAAWc,GAAG,CAACP,QAAS,MACxB,OAAO,IACR,CACD,CAMA,SAASW,oBAAoBX,OAAe,EAC3C,MAAMY,QAAUZ,QAAQa,IAAI,GAC5B,GAAID,UAAY,IAAMhB,mBAAmBkB,GAAG,CAACF,SAAU,OAAO,KAI9D,GAAIA,UAAY,UAAYA,UAAY,SAAU,OAAO,KAEzD,OAAO,KACR,CAOA,SAASG,iBACRC,WAAmB,CACnBC,UAAkB,EAGlB,GAAIN,oBAAoBM,YAAa,OAAO,KAK5C,OAAO,IACR,CA8BO,SAAS/B,gBACfgC,UAAkB,CAClBD,UAAkB,CAClBE,YAAsB/B,oBAAoB,EAG1C,GAAI8B,aAAeD,WAAY,OAAO,KAGtC,MAAMG,SAAW,CAAC,EAAEF,WAAW,EAAE,EAAED,WAAW,EAAE,EAAEE,YAAY,CAAC,CAC/D,MAAMlB,OAASV,YAAYW,GAAG,CAACkB,UAC/B,GAAInB,SAAWE,UAAW,OAAOF,OAGjC,MAAMoB,MAAQN,iBAAiBG,WAAYD,YAC3C,GAAII,QAAU,KAAM,CACnB9B,YAAYgB,GAAG,CAACa,SAAUC,OAC1B,OAAOA,KACR,CAGA,MAAMC,SAAWd,aAAaS,YAC9B,GAAIK,WAAa,KAAM,CACtB/B,YAAYgB,GAAG,CAACa,SAAU,MAC1B,OAAO,IACR,CAGA,MAAMG,UAAYxB,gBAAgBmB,YAClC,GAAIK,YAAc,KAAM,CACvBhC,YAAYgB,GAAG,CAACa,SAAU,MAC1B,OAAO,IACR,CAMA,MAAMI,KAAO,IAAI3B,IACjB,IAAI4B,aAAe,EACnB,IAAIC,SAAW,EACf,MAAMC,YAAcR,YAAc,EAElC,MAAOM,aAAeN,aAAeO,SAAWC,YAAa,CAC5DD,WACA,MAAME,OAASL,UAAUM,GAAG,GAG5B,GAAI,OAAOD,SAAW,UAAYA,OAAOE,MAAM,CAAGzC,qBAAsB,CACvE,QACD,CAGA,GAAImC,KAAKV,GAAG,CAACc,QAAS,CACrB,QACD,CACAJ,KAAKO,GAAG,CAACH,OACTH,CAAAA,eAIA,GAAI,CAACH,SAASU,IAAI,CAACJ,QAAS,CAE3BrC,YAAYgB,GAAG,CAACa,SAAU,OAC1B,OAAO,KACR,CACD,CAGA,GAAIK,eAAiB,EAAG,CACvBlC,YAAYgB,GAAG,CAACa,SAAU,MAC1B,OAAO,IACR,CAGA7B,YAAYgB,GAAG,CAACa,SAAU,MAC1B,OAAO,IACR,CAaO,SAASpC,sBACfiD,QAAgB,CAChBC,QAAgB,CAChBf,YAAsB/B,oBAAoB,EAE1C,GAAI6C,WAAaC,SAAU,OAAO,KAElC,MAAMC,MAAQjD,gBAAgB+C,SAAUC,SAAUf,aAClD,GAAIgB,QAAU,KAAM,OAAO,KAC3B,GAAIA,QAAU,MAAO,OAAO,MAE5B,MAAMC,MAAQlD,gBAAgBgD,SAAUD,SAAUd,aAClD,GAAIiB,QAAU,KAAM,OAAO,KAE3B,OAAOA,KACR,CAiBO,SAASjD,iBAAiBa,OAAe,EAC/C,MAAMY,QAAUZ,QAAQa,IAAI,GAC5B,GAAID,UAAY,GAAI,OAAO,KAE3B,OAAOhB,mBAAmBkB,GAAG,CAACF,QAC/B"}
|