@hyperjump/json-schema 0.23.2 → 0.23.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/json-schema-amd.js +933 -42
- package/dist/json-schema-amd.js.map +1 -1
- package/dist/json-schema-amd.min.js +2 -3
- package/dist/json-schema-amd.min.js.map +1 -1
- package/dist/json-schema-cjs.js +933 -42
- package/dist/json-schema-cjs.js.map +1 -1
- package/dist/json-schema-cjs.min.js +2 -3
- package/dist/json-schema-cjs.min.js.map +1 -1
- package/dist/json-schema-esm.js +933 -42
- package/dist/json-schema-esm.js.map +1 -1
- package/dist/json-schema-esm.min.js +2 -3
- package/dist/json-schema-esm.min.js.map +1 -1
- package/dist/json-schema-iife.js +933 -42
- package/dist/json-schema-iife.js.map +1 -1
- package/dist/json-schema-iife.min.js +2 -3
- package/dist/json-schema-iife.min.js.map +1 -1
- package/dist/json-schema-system.js +933 -42
- package/dist/json-schema-system.js.map +1 -1
- package/dist/json-schema-system.min.js +2 -3
- package/dist/json-schema-system.min.js.map +1 -1
- package/dist/json-schema-umd.js +933 -42
- package/dist/json-schema-umd.js.map +1 -1
- package/dist/json-schema-umd.min.js +2 -3
- package/dist/json-schema-umd.min.js.map +1 -1
- package/lib/common.d.ts +1 -0
- package/lib/draft-04.d.ts +3 -2
- package/lib/draft-06.d.ts +3 -2
- package/lib/draft-07.d.ts +3 -2
- package/lib/draft-2019-09.d.ts +3 -2
- package/lib/draft-2020-12.d.ts +3 -2
- package/package.json +3 -3
package/dist/json-schema-amd.js
CHANGED
@@ -2016,9 +2016,9 @@ define(['exports'], (function (exports) { 'use strict';
 }
 };
 
-const append = curry$a((segment, pointer) => pointer + "/" + escape(segment));
+const append = curry$a((segment, pointer) => pointer + "/" + escape$1(segment));
 
-const escape = (segment) => segment.toString().replace(/~/g, "~0").replace(/\//g, "~1");
+const escape$1 = (segment) => segment.toString().replace(/~/g, "~0").replace(/\//g, "~1");
 const unescape = (segment) => segment.toString().replace(/~1/g, "/").replace(/~0/g, "~");
 const computeSegment = (value, segment) => Array.isArray(value) && segment === "-" ? value.length : segment;
 
@@ -2037,7 +2037,7 @@ define(['exports'], (function (exports) { 'use strict';
 
 const isScalar = (value) => value === null || typeof value !== "object";
 
-var lib$
+var lib$4 = { nil: nil$2, append, get: get$2, set, assign, unset, remove };
 
 const $__value = Symbol("$__value");
 const $__href = Symbol("$__href");
@@ -2053,7 +2053,7 @@ define(['exports'], (function (exports) { 'use strict';
 
 var reference = { cons: cons$1, isReference, href, value: value$2 };
 
-const JsonPointer$
+const JsonPointer$3 = lib$4;
 const curry$9 = justCurryIt$1;
 const { resolveUrl: resolveUrl$2, jsonTypeOf: jsonTypeOf$1 } = common$1;
 const Reference$2 = reference;
@@ -2077,7 +2077,7 @@ define(['exports'], (function (exports) { 'use strict';
 
 const step$1 = (key, doc) => Object.freeze({
 ...doc,
-pointer: JsonPointer$
+pointer: JsonPointer$3.append(key, doc.pointer),
 value: value$1(doc)[key]
 });
 
@@ -2221,7 +2221,7 @@ define(['exports'], (function (exports) { 'use strict';
 ], doc);
 };
 
-var lib$
+var lib$3 = {
 entries: entries$2,
 map: map$3,
 filter: filter,
@@ -2233,6 +2233,889 @@ define(['exports'], (function (exports) { 'use strict';
 allValues: allValues
 };
 
+var moo$1 = {exports: {}};
+
+(function (module) {
+(function(root, factory) {
+if (module.exports) {
+module.exports = factory();
+} else {
+root.moo = factory();
+}
+}(commonjsGlobal, function() {
+
+var hasOwnProperty = Object.prototype.hasOwnProperty;
+var toString = Object.prototype.toString;
+var hasSticky = typeof new RegExp().sticky === 'boolean';
+
+/***************************************************************************/
+
+function isRegExp(o) { return o && toString.call(o) === '[object RegExp]' }
+function isObject(o) { return o && typeof o === 'object' && !isRegExp(o) && !Array.isArray(o) }
+
+function reEscape(s) {
+return s.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&')
+}
+function reGroups(s) {
+var re = new RegExp('|' + s);
+return re.exec('').length - 1
+}
+function reCapture(s) {
+return '(' + s + ')'
+}
+function reUnion(regexps) {
+if (!regexps.length) return '(?!)'
+var source = regexps.map(function(s) {
+return "(?:" + s + ")"
+}).join('|');
+return "(?:" + source + ")"
+}
+
+function regexpOrLiteral(obj) {
+if (typeof obj === 'string') {
+return '(?:' + reEscape(obj) + ')'
+
+} else if (isRegExp(obj)) {
+// TODO: consider /u support
+if (obj.ignoreCase) throw new Error('RegExp /i flag not allowed')
+if (obj.global) throw new Error('RegExp /g flag is implied')
+if (obj.sticky) throw new Error('RegExp /y flag is implied')
+if (obj.multiline) throw new Error('RegExp /m flag is implied')
+return obj.source
+
+} else {
+throw new Error('Not a pattern: ' + obj)
+}
+}
+
+function pad(s, length) {
+if (s.length > length) {
+return s
+}
+return Array(length - s.length + 1).join(" ") + s
+}
+
+function lastNLines(string, numLines) {
+var position = string.length;
+var lineBreaks = 0;
+while (true) {
+var idx = string.lastIndexOf("\n", position - 1);
+if (idx === -1) {
+break;
+} else {
+lineBreaks++;
+}
+position = idx;
+if (lineBreaks === numLines) {
+break;
+}
+if (position === 0) {
+break;
+}
+}
+var startPosition =
+lineBreaks < numLines ?
+0 :
+position + 1;
+return string.substring(startPosition).split("\n")
+}
+
+function objectToRules(object) {
+var keys = Object.getOwnPropertyNames(object);
+var result = [];
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+var thing = object[key];
+var rules = [].concat(thing);
+if (key === 'include') {
+for (var j = 0; j < rules.length; j++) {
+result.push({include: rules[j]});
+}
+continue
+}
+var match = [];
+rules.forEach(function(rule) {
+if (isObject(rule)) {
+if (match.length) result.push(ruleOptions(key, match));
+result.push(ruleOptions(key, rule));
+match = [];
+} else {
+match.push(rule);
+}
+});
+if (match.length) result.push(ruleOptions(key, match));
+}
+return result
+}
+
+function arrayToRules(array) {
+var result = [];
+for (var i = 0; i < array.length; i++) {
+var obj = array[i];
+if (obj.include) {
+var include = [].concat(obj.include);
+for (var j = 0; j < include.length; j++) {
+result.push({include: include[j]});
+}
+continue
+}
+if (!obj.type) {
+throw new Error('Rule has no type: ' + JSON.stringify(obj))
+}
+result.push(ruleOptions(obj.type, obj));
+}
+return result
+}
+
+function ruleOptions(type, obj) {
+if (!isObject(obj)) {
+obj = { match: obj };
+}
+if (obj.include) {
+throw new Error('Matching rules cannot also include states')
+}
+
+// nb. error and fallback imply lineBreaks
+var options = {
+defaultType: type,
+lineBreaks: !!obj.error || !!obj.fallback,
+pop: false,
+next: null,
+push: null,
+error: false,
+fallback: false,
+value: null,
+type: null,
+shouldThrow: false,
+};
+
+// Avoid Object.assign(), so we support IE9+
+for (var key in obj) {
+if (hasOwnProperty.call(obj, key)) {
+options[key] = obj[key];
+}
+}
+
+// type transform cannot be a string
+if (typeof options.type === 'string' && type !== options.type) {
+throw new Error("Type transform cannot be a string (type '" + options.type + "' for token '" + type + "')")
+}
+
+// convert to array
+var match = options.match;
+options.match = Array.isArray(match) ? match : match ? [match] : [];
+options.match.sort(function(a, b) {
+return isRegExp(a) && isRegExp(b) ? 0
+: isRegExp(b) ? -1 : isRegExp(a) ? +1 : b.length - a.length
+});
+return options
+}
+
+function toRules(spec) {
+return Array.isArray(spec) ? arrayToRules(spec) : objectToRules(spec)
+}
+
+var defaultErrorRule = ruleOptions('error', {lineBreaks: true, shouldThrow: true});
+function compileRules(rules, hasStates) {
+var errorRule = null;
+var fast = Object.create(null);
+var fastAllowed = true;
+var unicodeFlag = null;
+var groups = [];
+var parts = [];
+
+// If there is a fallback rule, then disable fast matching
+for (var i = 0; i < rules.length; i++) {
+if (rules[i].fallback) {
+fastAllowed = false;
+}
+}
+
+for (var i = 0; i < rules.length; i++) {
+var options = rules[i];
+
+if (options.include) {
+// all valid inclusions are removed by states() preprocessor
+throw new Error('Inheritance is not allowed in stateless lexers')
+}
+
+if (options.error || options.fallback) {
+// errorRule can only be set once
+if (errorRule) {
+if (!options.fallback === !errorRule.fallback) {
+throw new Error("Multiple " + (options.fallback ? "fallback" : "error") + " rules not allowed (for token '" + options.defaultType + "')")
+} else {
+throw new Error("fallback and error are mutually exclusive (for token '" + options.defaultType + "')")
+}
+}
+errorRule = options;
+}
+
+var match = options.match.slice();
+if (fastAllowed) {
+while (match.length && typeof match[0] === 'string' && match[0].length === 1) {
+var word = match.shift();
+fast[word.charCodeAt(0)] = options;
+}
+}
+
+// Warn about inappropriate state-switching options
+if (options.pop || options.push || options.next) {
+if (!hasStates) {
+throw new Error("State-switching options are not allowed in stateless lexers (for token '" + options.defaultType + "')")
+}
+if (options.fallback) {
+throw new Error("State-switching options are not allowed on fallback tokens (for token '" + options.defaultType + "')")
+}
+}
+
+// Only rules with a .match are included in the RegExp
+if (match.length === 0) {
+continue
+}
+fastAllowed = false;
+
+groups.push(options);
+
+// Check unicode flag is used everywhere or nowhere
+for (var j = 0; j < match.length; j++) {
+var obj = match[j];
+if (!isRegExp(obj)) {
+continue
+}
+
+if (unicodeFlag === null) {
+unicodeFlag = obj.unicode;
+} else if (unicodeFlag !== obj.unicode && options.fallback === false) {
+throw new Error('If one rule is /u then all must be')
+}
+}
+
+// convert to RegExp
+var pat = reUnion(match.map(regexpOrLiteral));
+
+// validate
+var regexp = new RegExp(pat);
+if (regexp.test("")) {
+throw new Error("RegExp matches empty string: " + regexp)
+}
+var groupCount = reGroups(pat);
+if (groupCount > 0) {
+throw new Error("RegExp has capture groups: " + regexp + "\nUse (?: … ) instead")
+}
+
+// try and detect rules matching newlines
+if (!options.lineBreaks && regexp.test('\n')) {
+throw new Error('Rule should declare lineBreaks: ' + regexp)
+}
+
+// store regex
+parts.push(reCapture(pat));
+}
+
+
+// If there's no fallback rule, use the sticky flag so we only look for
+// matches at the current index.
+//
+// If we don't support the sticky flag, then fake it using an irrefutable
+// match (i.e. an empty pattern).
+var fallbackRule = errorRule && errorRule.fallback;
+var flags = hasSticky && !fallbackRule ? 'ym' : 'gm';
+var suffix = hasSticky || fallbackRule ? '' : '|';
+
+if (unicodeFlag === true) flags += "u";
+var combined = new RegExp(reUnion(parts) + suffix, flags);
+return {regexp: combined, groups: groups, fast: fast, error: errorRule || defaultErrorRule}
+}
+
+function compile(rules) {
+var result = compileRules(toRules(rules));
+return new Lexer({start: result}, 'start')
+}
+
+function checkStateGroup(g, name, map) {
+var state = g && (g.push || g.next);
+if (state && !map[state]) {
+throw new Error("Missing state '" + state + "' (in token '" + g.defaultType + "' of state '" + name + "')")
+}
+if (g && g.pop && +g.pop !== 1) {
+throw new Error("pop must be 1 (in token '" + g.defaultType + "' of state '" + name + "')")
+}
+}
+function compileStates(states, start) {
+var all = states.$all ? toRules(states.$all) : [];
+delete states.$all;
+
+var keys = Object.getOwnPropertyNames(states);
+if (!start) start = keys[0];
+
+var ruleMap = Object.create(null);
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+ruleMap[key] = toRules(states[key]).concat(all);
+}
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+var rules = ruleMap[key];
+var included = Object.create(null);
+for (var j = 0; j < rules.length; j++) {
+var rule = rules[j];
+if (!rule.include) continue
+var splice = [j, 1];
+if (rule.include !== key && !included[rule.include]) {
+included[rule.include] = true;
+var newRules = ruleMap[rule.include];
+if (!newRules) {
+throw new Error("Cannot include nonexistent state '" + rule.include + "' (in state '" + key + "')")
+}
+for (var k = 0; k < newRules.length; k++) {
+var newRule = newRules[k];
+if (rules.indexOf(newRule) !== -1) continue
+splice.push(newRule);
+}
+}
+rules.splice.apply(rules, splice);
+j--;
+}
+}
+
+var map = Object.create(null);
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+map[key] = compileRules(ruleMap[key], true);
+}
+
+for (var i = 0; i < keys.length; i++) {
+var name = keys[i];
+var state = map[name];
+var groups = state.groups;
+for (var j = 0; j < groups.length; j++) {
+checkStateGroup(groups[j], name, map);
+}
+var fastKeys = Object.getOwnPropertyNames(state.fast);
+for (var j = 0; j < fastKeys.length; j++) {
+checkStateGroup(state.fast[fastKeys[j]], name, map);
+}
+}
+
+return new Lexer(map, start)
+}
+
+function keywordTransform(map) {
+
+// Use a JavaScript Map to map keywords to their corresponding token type
+// unless Map is unsupported, then fall back to using an Object:
+var isMap = typeof Map !== 'undefined';
+var reverseMap = isMap ? new Map : Object.create(null);
+
+var types = Object.getOwnPropertyNames(map);
+for (var i = 0; i < types.length; i++) {
+var tokenType = types[i];
+var item = map[tokenType];
+var keywordList = Array.isArray(item) ? item : [item];
+keywordList.forEach(function(keyword) {
+if (typeof keyword !== 'string') {
+throw new Error("keyword must be string (in keyword '" + tokenType + "')")
+}
+if (isMap) {
+reverseMap.set(keyword, tokenType);
+} else {
+reverseMap[keyword] = tokenType;
+}
+});
+}
+return function(k) {
+return isMap ? reverseMap.get(k) : reverseMap[k]
+}
+}
+
+/***************************************************************************/
+
+var Lexer = function(states, state) {
+this.startState = state;
+this.states = states;
+this.buffer = '';
+this.stack = [];
+this.reset();
+};
+
+Lexer.prototype.reset = function(data, info) {
+this.buffer = data || '';
+this.index = 0;
+this.line = info ? info.line : 1;
+this.col = info ? info.col : 1;
+this.queuedToken = info ? info.queuedToken : null;
+this.queuedText = info ? info.queuedText: "";
+this.queuedThrow = info ? info.queuedThrow : null;
+this.setState(info ? info.state : this.startState);
+this.stack = info && info.stack ? info.stack.slice() : [];
+return this
+};
+
+Lexer.prototype.save = function() {
+return {
+line: this.line,
+col: this.col,
+state: this.state,
+stack: this.stack.slice(),
+queuedToken: this.queuedToken,
+queuedText: this.queuedText,
+queuedThrow: this.queuedThrow,
+}
+};
+
+Lexer.prototype.setState = function(state) {
+if (!state || this.state === state) return
+this.state = state;
+var info = this.states[state];
+this.groups = info.groups;
+this.error = info.error;
+this.re = info.regexp;
+this.fast = info.fast;
+};
+
+Lexer.prototype.popState = function() {
+this.setState(this.stack.pop());
+};
+
+Lexer.prototype.pushState = function(state) {
+this.stack.push(this.state);
+this.setState(state);
+};
+
+var eat = hasSticky ? function(re, buffer) { // assume re is /y
+return re.exec(buffer)
+} : function(re, buffer) { // assume re is /g
+var match = re.exec(buffer);
+// will always match, since we used the |(?:) trick
+if (match[0].length === 0) {
+return null
+}
+return match
+};
+
+Lexer.prototype._getGroup = function(match) {
+var groupCount = this.groups.length;
+for (var i = 0; i < groupCount; i++) {
+if (match[i + 1] !== undefined) {
+return this.groups[i]
+}
+}
+throw new Error('Cannot find token type for matched text')
+};
+
+function tokenToString() {
+return this.value
+}
+
+Lexer.prototype.next = function() {
+var index = this.index;
+
+// If a fallback token matched, we don't need to re-run the RegExp
+if (this.queuedGroup) {
+var token = this._token(this.queuedGroup, this.queuedText, index);
+this.queuedGroup = null;
+this.queuedText = "";
+return token
+}
+
+var buffer = this.buffer;
+if (index === buffer.length) {
+return // EOF
+}
+
+// Fast matching for single characters
+var group = this.fast[buffer.charCodeAt(index)];
+if (group) {
+return this._token(group, buffer.charAt(index), index)
+}
+
+// Execute RegExp
+var re = this.re;
+re.lastIndex = index;
+var match = eat(re, buffer);
+
+// Error tokens match the remaining buffer
+var error = this.error;
+if (match == null) {
+return this._token(error, buffer.slice(index, buffer.length), index)
+}
+
+var group = this._getGroup(match);
+var text = match[0];
+
+if (error.fallback && match.index !== index) {
+this.queuedGroup = group;
+this.queuedText = text;
+
+// Fallback tokens contain the unmatched portion of the buffer
+return this._token(error, buffer.slice(index, match.index), index)
+}
+
+return this._token(group, text, index)
+};
+
+Lexer.prototype._token = function(group, text, offset) {
+// count line breaks
+var lineBreaks = 0;
+if (group.lineBreaks) {
+var matchNL = /\n/g;
+var nl = 1;
+if (text === '\n') {
+lineBreaks = 1;
+} else {
+while (matchNL.exec(text)) { lineBreaks++; nl = matchNL.lastIndex; }
+}
+}
+
+var token = {
+type: (typeof group.type === 'function' && group.type(text)) || group.defaultType,
+value: typeof group.value === 'function' ? group.value(text) : text,
+text: text,
+toString: tokenToString,
+offset: offset,
+lineBreaks: lineBreaks,
+line: this.line,
+col: this.col,
+};
+// nb. adding more props to token object will make V8 sad!
+
+var size = text.length;
+this.index += size;
+this.line += lineBreaks;
+if (lineBreaks !== 0) {
+this.col = size - nl + 1;
+} else {
+this.col += size;
+}
+
+// throw, if no rule with {error: true}
+if (group.shouldThrow) {
+var err = new Error(this.formatError(token, "invalid syntax"));
+throw err;
+}
+
+if (group.pop) this.popState();
+else if (group.push) this.pushState(group.push);
+else if (group.next) this.setState(group.next);
+
+return token
+};
+
+if (typeof Symbol !== 'undefined' && Symbol.iterator) {
+var LexerIterator = function(lexer) {
+this.lexer = lexer;
+};
+
+LexerIterator.prototype.next = function() {
+var token = this.lexer.next();
+return {value: token, done: !token}
+};
+
+LexerIterator.prototype[Symbol.iterator] = function() {
+return this
+};
+
+Lexer.prototype[Symbol.iterator] = function() {
+return new LexerIterator(this)
+};
+}
+
+Lexer.prototype.formatError = function(token, message) {
+if (token == null) {
+// An undefined token indicates EOF
+var text = this.buffer.slice(this.index);
+var token = {
+text: text,
+offset: this.index,
+lineBreaks: text.indexOf('\n') === -1 ? 0 : 1,
+line: this.line,
+col: this.col,
+};
+}
+
+var numLinesAround = 2;
+var firstDisplayedLine = Math.max(token.line - numLinesAround, 1);
+var lastDisplayedLine = token.line + numLinesAround;
+var lastLineDigits = String(lastDisplayedLine).length;
+var displayedLines = lastNLines(
+this.buffer,
+(this.line - token.line) + numLinesAround + 1
+)
+.slice(0, 5);
+var errorLines = [];
+errorLines.push(message + " at line " + token.line + " col " + token.col + ":");
+errorLines.push("");
+for (var i = 0; i < displayedLines.length; i++) {
+var line = displayedLines[i];
+var lineNo = firstDisplayedLine + i;
+errorLines.push(pad(String(lineNo), lastLineDigits) + " " + line);
+if (lineNo === token.line) {
+errorLines.push(pad("", lastLineDigits + token.col + 1) + "^");
+}
+}
+return errorLines.join("\n")
+};
+
+Lexer.prototype.clone = function() {
+return new Lexer(this.states, this.state)
+};
+
+Lexer.prototype.has = function(tokenType) {
+return true
+};
+
+
+return {
+compile: compile,
+states: compileStates,
+error: Object.freeze({error: true}),
+fallback: Object.freeze({fallback: true}),
+keywords: keywordTransform,
+}
+
+}));
+} (moo$1));
+
+const moo = moo$1.exports;
+
+
+const digit = `[0-9]`;
+const digit19 = `[1-9]`;
+const hexdig = `[0-9a-fA-F]`;
+
+// String
+const unescaped = `[\\x20-\\x21\\x23-\\x5b\\x5d-\\u{10ffff}]`;
+const escape = `\\\\`;
+const escaped = `${escape}(?:["\\/\\\\brfnt]|u${hexdig}{4})`;
+const char = `(?:${unescaped}|${escaped})`;
+const string = `"${char}*"`;
+
+// Number
+const int = `(?:0|${digit19}${digit}*)`;
+const frac = `\\.${digit}+`;
+const e = `[eE]`;
+const exp = `${e}[-+]?${digit}+`;
+const number = `-?${int}(?:${frac})?(?:${exp})?`;
+
+// Whitespace
+const whitespace = `(?:(?:\\r?\\n)|[ \\t])+`;
+
+var lexer = (json) => {
+const lexer = moo.states({
+main: {
+WS: { match: new RegExp(whitespace, "u"), lineBreaks: true },
+true: { match: "true", value: () => true },
+false: { match: "false", value: () => false },
+null: { match: "null", value: () => null },
+number: { match: new RegExp(number, "u"), value: parseFloat },
+string: { match: new RegExp(string, "u"), value: JSON.parse },
+"{": "{",
+"}": "}",
+"[": "[",
+"]": "]",
+":": ":",
+",": ",",
+error: moo.error
+}
+});
+lexer.reset(json);
+
+const _next = () => {
+let token;
+do {
+token = lexer.next();
+if (token?.type === "error") {
+throw SyntaxError(lexer.formatError(token, "Unrecognized token"));
+}
+} while (token?.type === "WS");
+
+return token;
+};
+
+let previous;
+let nextToken = _next();
+
+const next = (expectedType = undefined) => {
+previous = nextToken;
+nextToken = _next();
+if (expectedType && previous?.type !== expectedType) {
+throw SyntaxError(lexer.formatError(previous, `Expected a '${expectedType}'`));
+}
+return previous;
+};
+
+const peek = () => nextToken;
+
+const defaultErrorToken = { offset: 0, line: 1, col: 0, text: "" };
+const syntaxError = (message) => {
+const referenceToken = previous || defaultErrorToken;
+const errorToken = {
+...referenceToken,
+offset: referenceToken.offset + referenceToken.text.length,
+col: referenceToken.col + referenceToken.text.length
+};
+throw new SyntaxError(lexer.formatError(errorToken, message));
+};
+
+return { next, peek, syntaxError };
+};
+
+const JsonPointer$2 = lib$4;
+const jsonLexer = lexer;
+
+
+const defaultReviver = (key, value) => value;
+const parse$3 = (json, reviver = defaultReviver) => {
+const lexer = jsonLexer(json);
+const value = parseValue(lexer, "", JsonPointer$2.nil, reviver);
+
+const token = lexer.peek();
+if (token) {
+lexer.syntaxError("A value has been parsed, but more tokens were found");
+}
+return value;
+};
+
+const parseValue = (lexer, key, pointer, reviver) => {
+let value;
+const token = lexer.next();
+switch (token?.type) {
+case "true":
+case "false":
+case "null":
+case "number":
+case "string":
+value = token.value;
+break;
+case "{":
+value = parseObject(lexer, key, pointer, reviver);
+break;
+case "[":
+value = parseArray(lexer, key, pointer, reviver);
+break;
+default:
+lexer.syntaxError("Expected a JSON value");
+}
+
+return reviver(key, value, pointer);
+};
+
+const parseObject = (lexer, key, pointer, reviver) => {
+const value = {};
+
+if (lexer.peek()?.type !== "}") {
+parseProperties(lexer, key, pointer, reviver, value);
+}
+
+lexer.next("}");
+
+return value;
+};
+
+const parseProperties = (lexer, key, pointer, reviver, value) => {
+const propertyName = lexer.next("string").value;
+lexer.next(":");
+if (!isValueToken(lexer.peek())) {
+lexer.syntaxError("Expected a JSON value");
+}
+value[propertyName] = parseValue(lexer, propertyName, JsonPointer$2.append(propertyName, pointer), reviver);
+
+if (lexer.peek()?.type === ",") {
+lexer.next(); // burn comma
+parseProperties(lexer, propertyName, pointer, reviver, value);
+} else if (isValueToken(lexer.peek())) {
+lexer.next(",");
+}
+};
+
+const parseArray = (lexer, key, pointer, reviver) => {
+const value = [];
+
+if (lexer.peek()?.type !== "]") {
+parseItems(lexer, 0, pointer, reviver, value);
+}
+
+lexer.next("]");
+
+return value;
+};
+
+const parseItems = (lexer, key, pointer, reviver, value) => {
+if (!isValueToken(lexer.peek())) {
+lexer.syntaxError("Expected a JSON value");
+}
+value[key] = parseValue(lexer, key, JsonPointer$2.append(key, pointer), reviver);
+if (lexer.peek()?.type === ",") {
+lexer.next(); // burn comma
+parseItems(lexer, key + 1, pointer, reviver, value);
+} else if (isValueToken(lexer.peek())) {
+lexer.next(",");
+}
+};
+
+const valueType = new Set(["string", "number", "true", "false", "null", "[", "{"]);
+const isValueToken = (token) => valueType.has(token?.type);
+
+var parse_1 = parse$3;
+
+const JsonPointer$1 = lib$4;
+
+
+const defaultReplacer = (key, value) => value;
+const stringify$2 = (value, replacer = defaultReplacer, space = "") => {
+return stringifyValue(value, replacer, space, "", JsonPointer$1.nil, 1);
+};
+
+const stringifyValue = (value, replacer, space, key, pointer, depth) => {
+value = replacer(key, value, pointer);
+let result;
+if (Array.isArray(value)) {
+result = stringifyArray(value, replacer, space, pointer, depth);
+} else if (typeof value === "object" && value !== null) {
+result = stringifyObject(value, replacer, space, pointer, depth);
+} else {
+result = JSON.stringify(value);
+}
+
+return result;
+};
+
+const stringifyArray = (value, replacer, space, pointer, depth) => {
+if (value.length === 0) {
+space = "";
+}
+const padding = space ? `\n${space.repeat(depth - 1)}` : "";
+return "[" + padding + space + value
+.map((item, index) => {
+const indexPointer = JsonPointer$1.append(index, pointer);
+return stringifyValue(item, replacer, space, index, indexPointer, depth + 1);
+})
+.join(`,${padding}${space}`) + padding + "]";
+};
+
+const stringifyObject = (value, replacer, space, pointer, depth) => {
+if (Object.keys(value).length === 0) {
+space = "";
+}
+const padding = space ? `\n${space.repeat(depth - 1)}` : "";
+const spacing = space ? " " : "";
+return "{" + padding + space + Object.entries(value)
+.map(([key, value]) => {
+const keyPointer = JsonPointer$1.append(key, pointer);
+return JSON.stringify(key) + ":" + spacing + stringifyValue(value, replacer, space, key, keyPointer, depth + 1);
+})
+.join(`,${padding}${space}`) + padding + "}";
+};
+
+var stringify_1 = stringify$2;
+
+const parse$2 = parse_1;
+const stringify$1 = stringify_1;
+
+
+var lib$2 = { parse: parse$2, stringify: stringify$1 };
+
 var fetch_browser = fetch;
 
 var contentType = {};
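Note on the hunk above: most of it vendors the moo tokenizer, then uses it to build a small JSON lexer/parser and a matching stringifier (bundled as lib$2) whose reviver and replacer callbacks receive the JSON Pointer of the value being visited, in addition to the usual key and value. A minimal sketch of that callback shape, based only on the bundled code shown above; the Json alias and the sample document are illustrative, not part of the package's public API:

    // Illustrative alias for the bundled lib$2 module defined above.
    const Json = lib$2;

    // The reviver is called as (key, value, pointer), children before parents:
    // "/a/b/0", "/a/b/1", "/a/b", "/a", and finally "" for the root.
    const doc = Json.parse('{"a": {"b": [1, 2]}}', (key, value, pointer) => {
      return value;
    });

    // The replacer has the same (key, value, pointer) signature; a later hunk
    // shows toSchema using it to splice anchor keywords back into the schema.
    const text = Json.stringify(doc, (key, value, pointer) => {
      return pointer === "/a/b" ? [...value, 3] : value;
    }, "  ");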
@@ -2488,8 +3371,9 @@ define(['exports'], (function (exports) { 'use strict';
 var mediaTypes = { addPlugin, parse, getContentType };
 
 const curry$1 = justCurryIt$1;
-const Pact$a = lib$
-const
+const Pact$a = lib$3;
+const Json = lib$2;
+const JsonPointer = lib$4;
 const { jsonTypeOf, resolveUrl: resolveUrl$1, urlFragment, pathRelative } = common$1;
 const fetch$1 = fetch_browser;
 const Reference$1 = reference;
@@ -2771,36 +3655,43 @@ define(['exports'], (function (exports) { 'use strict';
 const toSchema = (schemaDoc, options = {}) => {
 const fullOptions = { ...toSchemaDefaultOptions, ...options };
 
-const
-
-
+const anchorToken = getConfig(schemaDoc.dialectId, "anchorToken");
+const dynamicAnchorToken = getConfig(schemaDoc.dialectId, "dynamicAnchorToken");
+
+const anchors = {};
+for (const anchor in schemaDoc.anchors) {
+if (anchor !== "" && !schemaDoc.dynamicAnchors[anchor]) {
+anchors[schemaDoc.anchors[anchor]] = anchor;
 }
+}
 
-
-
-const
-
-
+const dynamicAnchors = {};
+for (const anchor in schemaDoc.dynamicAnchors) {
+const pointer = urlFragment(schemaDoc.dynamicAnchors[anchor]);
+dynamicAnchors[pointer] = anchor;
+}
+
+const schema = JSON.parse(Json.stringify(schemaDoc.schema, (key, value, pointer) => {
+if (Reference$1.isReference(value)) {
+const refValue = Reference$1.value(value);
+const embeddedDialect = typeof refValue.$schema === "string" ? resolveUrl$1(refValue.$schema, "") : schemaDoc.dialectId;
+const embeddedToken = getConfig(embeddedDialect, "embeddedToken");
+if (!fullOptions.includeEmbedded && embeddedToken in refValue) {
+return;
+} else {
+return Reference$1.value(value);
+}
 } else {
-
+if (pointer in anchors) {
+value = { [anchorToken]: anchors[pointer], ...value };
+}
+if (pointer in dynamicAnchors) {
+value = { [dynamicAnchorToken]: dynamicAnchors[pointer], ...value };
+}
+return value;
 }
 }));
 
-const dynamicAnchorToken = getConfig(schemaDoc.dialectId, "dynamicAnchorToken");
-Object.entries(schemaDoc.dynamicAnchors)
-.forEach(([anchor, uri]) => {
-const pointer = JsonPointer.append(dynamicAnchorToken, urlFragment(uri));
-JsonPointer.assign(pointer, schema, anchor);
-});
-
-const anchorToken = getConfig(schemaDoc.dialectId, "anchorToken");
-Object.entries(schemaDoc.anchors)
-.filter(([anchor]) => anchor !== "" && !(anchor in schemaDoc.dynamicAnchors))
-.forEach(([anchor, pointer]) => {
-const anchorPointer = JsonPointer.append(anchorToken, pointer);
-JsonPointer.assign(anchorPointer, schema, anchor);
-});
-
 const baseToken = getConfig(schemaDoc.dialectId, "baseToken");
 const id = relativeUri(fullOptions.parentId, schemaDoc.id);
 const dialect = fullOptions.parentDialect === schemaDoc.dialectId ? "" : schemaDoc.dialectId;
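Note on the hunk above: the removed code grafted anchor keywords onto the already-serialized schema with JsonPointer.append/JsonPointer.assign; the replacement indexes the anchors by JSON Pointer up front and re-inserts the keywords from inside the pointer-aware replacer while the schema is being re-serialized. A standalone sketch of just that re-insertion step, assuming anchorToken resolves to "$anchor" for the newer dialects and using a made-up pointer/anchor pair (an illustration, not the library's API):

    // anchors maps a JSON Pointer to an anchor name, as built in the loop above.
    const anchors = { "/$defs/address": "address" };
    const anchorToken = "$anchor"; // assumed getConfig(...) result for recent dialects

    const replacer = (key, value, pointer) =>
      pointer in anchors ? { [anchorToken]: anchors[pointer], ...value } : value;

    // Json.stringify hands each location's pointer to the replacer, so the
    // anchor keyword lands back on the subschema it was indexed from.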
@@ -3051,7 +3942,7 @@ define(['exports'], (function (exports) { 'use strict';
 addMediaTypePlugin: MediaTypes.addPlugin
 };
 
-const Pact$9 = lib$
+const Pact$9 = lib$3;
 const PubSub = pubsub.exports;
 const Core$x = core$2;
 const Instance$B = instance;
@@ -3282,7 +4173,7 @@ define(['exports'], (function (exports) { 'use strict';
 var additionalProperties6 = { compile: compile$G, interpret: interpret$G, collectEvaluatedProperties: collectEvaluatedProperties$c };
 
 const { Core: Core$r, Schema: Schema$H } = lib$1;
-const Pact$8 = lib$
+const Pact$8 = lib$3;
 
 
 const compile$F = (schema, ast) => Pact$8.pipeline([
@@ -3311,7 +4202,7 @@ define(['exports'], (function (exports) { 'use strict';
 var allOf = { compile: compile$F, interpret: interpret$F, collectEvaluatedProperties: collectEvaluatedProperties$b, collectEvaluatedItems: collectEvaluatedItems$c };
 
 const { Core: Core$q, Schema: Schema$G } = lib$1;
-const Pact$7 = lib$
+const Pact$7 = lib$3;
 
 
 const compile$E = (schema, ast) => Pact$7.pipeline([
@@ -3454,7 +4345,7 @@ define(['exports'], (function (exports) { 'use strict';
 var containsMinContainsMaxContains = { compile: compile$B, interpret: interpret$B, collectEvaluatedItems: collectEvaluatedItems$a };
 
 const { Core: Core$n, Schema: Schema$D } = lib$1;
-const Pact$6 = lib$
+const Pact$6 = lib$3;
 
 
 const compile$A = async (schema, ast) => {
@@ -3470,7 +4361,7 @@ define(['exports'], (function (exports) { 'use strict';
 var definitions = { compile: compile$A, interpret: interpret$A };
 
 const { Core: Core$m, Schema: Schema$C, Instance: Instance$s } = lib$1;
-const Pact$5 = lib$
+const Pact$5 = lib$3;
 
 
 const compile$z = (schema, ast) => Pact$5.pipeline([
@@ -3500,7 +4391,7 @@ define(['exports'], (function (exports) { 'use strict';
 var dependencies = { compile: compile$z, interpret: interpret$z };
 
 const { Schema: Schema$B, Instance: Instance$r } = lib$1;
-const Pact$4 = lib$
+const Pact$4 = lib$3;
 
 
 const compile$y = (schema) => Pact$4.pipeline([
@@ -3520,7 +4411,7 @@ define(['exports'], (function (exports) { 'use strict';
 var dependentRequired = { compile: compile$y, interpret: interpret$y };
 
 const { Core: Core$l, Schema: Schema$A, Instance: Instance$q } = lib$1;
-const Pact$3 = lib$
+const Pact$3 = lib$3;
 
 
 const compile$x = (schema, ast) => Pact$3.pipeline([
@@ -3929,7 +4820,7 @@ define(['exports'], (function (exports) { 'use strict';
 var pattern = { compile: compile$b, interpret: interpret$b };
 
 const { Core: Core$d, Schema: Schema$f, Instance: Instance$8 } = lib$1;
-const Pact$2 = lib$
+const Pact$2 = lib$3;
 
 
 const compile$a = (schema, ast) => Pact$2.pipeline([
@@ -3967,7 +4858,7 @@ define(['exports'], (function (exports) { 'use strict';
 var common = { isObject, escapeRegExp: escapeRegExp$1, splitUrl: splitUrl$1 };
 
 const { Core: Core$c, Schema: Schema$e, Instance: Instance$7 } = lib$1;
-const Pact$1 = lib$
+const Pact$1 = lib$3;
 const { escapeRegExp } = common;
 
 
@@ -4055,7 +4946,7 @@ define(['exports'], (function (exports) { 'use strict';
 var required = { compile: compile$5, interpret: interpret$5 };
 
 const { Core: Core$8, Schema: Schema$a, Instance: Instance$4 } = lib$1;
-const Pact = lib$
+const Pact = lib$3;
 
 
 const compile$4 = (schema, ast) => {