@hyperjump/json-schema 0.23.2 → 0.23.3
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/json-schema-amd.js +891 -42
- package/dist/json-schema-amd.js.map +1 -1
- package/dist/json-schema-amd.min.js +2 -2
- package/dist/json-schema-amd.min.js.map +1 -1
- package/dist/json-schema-cjs.js +891 -42
- package/dist/json-schema-cjs.js.map +1 -1
- package/dist/json-schema-cjs.min.js +2 -2
- package/dist/json-schema-cjs.min.js.map +1 -1
- package/dist/json-schema-esm.js +891 -42
- package/dist/json-schema-esm.js.map +1 -1
- package/dist/json-schema-esm.min.js +2 -2
- package/dist/json-schema-esm.min.js.map +1 -1
- package/dist/json-schema-iife.js +891 -42
- package/dist/json-schema-iife.js.map +1 -1
- package/dist/json-schema-iife.min.js +2 -2
- package/dist/json-schema-iife.min.js.map +1 -1
- package/dist/json-schema-system.js +891 -42
- package/dist/json-schema-system.js.map +1 -1
- package/dist/json-schema-system.min.js +2 -2
- package/dist/json-schema-system.min.js.map +1 -1
- package/dist/json-schema-umd.js +891 -42
- package/dist/json-schema-umd.js.map +1 -1
- package/dist/json-schema-umd.min.js +2 -2
- package/dist/json-schema-umd.min.js.map +1 -1
- package/lib/common.d.ts +1 -0
- package/lib/draft-04.d.ts +3 -2
- package/lib/draft-06.d.ts +3 -2
- package/lib/draft-07.d.ts +3 -2
- package/lib/draft-2019-09.d.ts +3 -2
- package/lib/draft-2020-12.d.ts +3 -2
- package/package.json +1 -1
package/dist/json-schema-umd.js
CHANGED
@@ -2020,9 +2020,9 @@
 }
 };

-const append = curry$a((segment, pointer) => pointer + "/" + escape(segment));
+const append = curry$a((segment, pointer) => pointer + "/" + escape$1(segment));

-const escape = (segment) => segment.toString().replace(/~/g, "~0").replace(/\//g, "~1");
+const escape$1 = (segment) => segment.toString().replace(/~/g, "~0").replace(/\//g, "~1");
 const unescape = (segment) => segment.toString().replace(/~1/g, "/").replace(/~0/g, "~");
 const computeSegment = (value, segment) => Array.isArray(value) && segment === "-" ? value.length : segment;

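Note: append builds a JSON Pointer by escaping the new segment per RFC 6901 (~ becomes ~0, / becomes ~1) before joining it with "/". A minimal sketch of the expected behaviour, using hypothetical uncurried stand-ins for the bundled helpers:

  // Stand-ins for escape$1/append above; these names are illustrative, not from the bundle.
  const escapeSegment = (segment) => segment.toString().replace(/~/g, "~0").replace(/\//g, "~1");
  const append = (segment, pointer) => pointer + "/" + escapeSegment(segment);

  append("a/b", "");        // => "/a~1b"
  append("~tilde", "/foo"); // => "/foo/~0tilde"
  append(0, "/items");      // => "/items/0"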
@@ -2041,7 +2041,7 @@

 const isScalar = (value) => value === null || typeof value !== "object";

-var lib$
+var lib$4 = { nil: nil$2, append, get: get$2, set, assign, unset, remove };

 const $__value = Symbol("$__value");
 const $__href = Symbol("$__href");
@@ -2057,7 +2057,7 @@

 var reference = { cons: cons$1, isReference, href, value: value$2 };

-const JsonPointer$
+const JsonPointer$3 = lib$4;
 const curry$9 = justCurryIt$1;
 const { resolveUrl: resolveUrl$2, jsonTypeOf: jsonTypeOf$1 } = common$1;
 const Reference$2 = reference;
@@ -2081,7 +2081,7 @@

 const step$1 = (key, doc) => Object.freeze({
 ...doc,
-pointer: JsonPointer$
+pointer: JsonPointer$3.append(key, doc.pointer),
 value: value$1(doc)[key]
 });

@@ -2225,7 +2225,7 @@
 ], doc);
 };

-var lib$
+var lib$3 = {
 entries: entries$2,
 map: map$3,
 filter: filter,
@@ -2237,6 +2237,847 @@
 allValues: allValues
 };

+var moo$1 = {exports: {}};
+
+(function (module) {
+(function(root, factory) {
+if (module.exports) {
+module.exports = factory();
+} else {
+root.moo = factory();
+}
+}(commonjsGlobal, function() {
+
+var hasOwnProperty = Object.prototype.hasOwnProperty;
+var toString = Object.prototype.toString;
+var hasSticky = typeof new RegExp().sticky === 'boolean';
+
+/***************************************************************************/
+
+function isRegExp(o) { return o && toString.call(o) === '[object RegExp]' }
+function isObject(o) { return o && typeof o === 'object' && !isRegExp(o) && !Array.isArray(o) }
+
+function reEscape(s) {
+return s.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&')
+}
+function reGroups(s) {
+var re = new RegExp('|' + s);
+return re.exec('').length - 1
+}
+function reCapture(s) {
+return '(' + s + ')'
+}
+function reUnion(regexps) {
+if (!regexps.length) return '(?!)'
+var source = regexps.map(function(s) {
+return "(?:" + s + ")"
+}).join('|');
+return "(?:" + source + ")"
+}
+
+function regexpOrLiteral(obj) {
+if (typeof obj === 'string') {
+return '(?:' + reEscape(obj) + ')'
+
+} else if (isRegExp(obj)) {
+// TODO: consider /u support
+if (obj.ignoreCase) throw new Error('RegExp /i flag not allowed')
+if (obj.global) throw new Error('RegExp /g flag is implied')
+if (obj.sticky) throw new Error('RegExp /y flag is implied')
+if (obj.multiline) throw new Error('RegExp /m flag is implied')
+return obj.source
+
+} else {
+throw new Error('Not a pattern: ' + obj)
+}
+}
+
+function objectToRules(object) {
+var keys = Object.getOwnPropertyNames(object);
+var result = [];
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+var thing = object[key];
+var rules = [].concat(thing);
+if (key === 'include') {
+for (var j = 0; j < rules.length; j++) {
+result.push({include: rules[j]});
+}
+continue
+}
+var match = [];
+rules.forEach(function(rule) {
+if (isObject(rule)) {
+if (match.length) result.push(ruleOptions(key, match));
+result.push(ruleOptions(key, rule));
+match = [];
+} else {
+match.push(rule);
+}
+});
+if (match.length) result.push(ruleOptions(key, match));
+}
+return result
+}
+
+function arrayToRules(array) {
+var result = [];
+for (var i = 0; i < array.length; i++) {
+var obj = array[i];
+if (obj.include) {
+var include = [].concat(obj.include);
+for (var j = 0; j < include.length; j++) {
+result.push({include: include[j]});
+}
+continue
+}
+if (!obj.type) {
+throw new Error('Rule has no type: ' + JSON.stringify(obj))
+}
+result.push(ruleOptions(obj.type, obj));
+}
+return result
+}
+
+function ruleOptions(type, obj) {
+if (!isObject(obj)) {
+obj = { match: obj };
+}
+if (obj.include) {
+throw new Error('Matching rules cannot also include states')
+}
+
+// nb. error and fallback imply lineBreaks
+var options = {
+defaultType: type,
+lineBreaks: !!obj.error || !!obj.fallback,
+pop: false,
+next: null,
+push: null,
+error: false,
+fallback: false,
+value: null,
+type: null,
+shouldThrow: false,
+};
+
+// Avoid Object.assign(), so we support IE9+
+for (var key in obj) {
+if (hasOwnProperty.call(obj, key)) {
+options[key] = obj[key];
+}
+}
+
+// type transform cannot be a string
+if (typeof options.type === 'string' && type !== options.type) {
+throw new Error("Type transform cannot be a string (type '" + options.type + "' for token '" + type + "')")
+}
+
+// convert to array
+var match = options.match;
+options.match = Array.isArray(match) ? match : match ? [match] : [];
+options.match.sort(function(a, b) {
+return isRegExp(a) && isRegExp(b) ? 0
+: isRegExp(b) ? -1 : isRegExp(a) ? +1 : b.length - a.length
+});
+return options
+}
+
+function toRules(spec) {
+return Array.isArray(spec) ? arrayToRules(spec) : objectToRules(spec)
+}
+
+var defaultErrorRule = ruleOptions('error', {lineBreaks: true, shouldThrow: true});
+function compileRules(rules, hasStates) {
+var errorRule = null;
+var fast = Object.create(null);
+var fastAllowed = true;
+var unicodeFlag = null;
+var groups = [];
+var parts = [];
+
+// If there is a fallback rule, then disable fast matching
+for (var i = 0; i < rules.length; i++) {
+if (rules[i].fallback) {
+fastAllowed = false;
+}
+}
+
+for (var i = 0; i < rules.length; i++) {
+var options = rules[i];
+
+if (options.include) {
+// all valid inclusions are removed by states() preprocessor
+throw new Error('Inheritance is not allowed in stateless lexers')
+}
+
+if (options.error || options.fallback) {
+// errorRule can only be set once
+if (errorRule) {
+if (!options.fallback === !errorRule.fallback) {
+throw new Error("Multiple " + (options.fallback ? "fallback" : "error") + " rules not allowed (for token '" + options.defaultType + "')")
+} else {
+throw new Error("fallback and error are mutually exclusive (for token '" + options.defaultType + "')")
+}
+}
+errorRule = options;
+}
+
+var match = options.match.slice();
+if (fastAllowed) {
+while (match.length && typeof match[0] === 'string' && match[0].length === 1) {
+var word = match.shift();
+fast[word.charCodeAt(0)] = options;
+}
+}
+
+// Warn about inappropriate state-switching options
+if (options.pop || options.push || options.next) {
+if (!hasStates) {
+throw new Error("State-switching options are not allowed in stateless lexers (for token '" + options.defaultType + "')")
+}
+if (options.fallback) {
+throw new Error("State-switching options are not allowed on fallback tokens (for token '" + options.defaultType + "')")
+}
+}
+
+// Only rules with a .match are included in the RegExp
+if (match.length === 0) {
+continue
+}
+fastAllowed = false;
+
+groups.push(options);
+
+// Check unicode flag is used everywhere or nowhere
+for (var j = 0; j < match.length; j++) {
+var obj = match[j];
+if (!isRegExp(obj)) {
+continue
+}
+
+if (unicodeFlag === null) {
+unicodeFlag = obj.unicode;
+} else if (unicodeFlag !== obj.unicode && options.fallback === false) {
+throw new Error('If one rule is /u then all must be')
+}
+}
+
+// convert to RegExp
+var pat = reUnion(match.map(regexpOrLiteral));
+
+// validate
+var regexp = new RegExp(pat);
+if (regexp.test("")) {
+throw new Error("RegExp matches empty string: " + regexp)
+}
+var groupCount = reGroups(pat);
+if (groupCount > 0) {
+throw new Error("RegExp has capture groups: " + regexp + "\nUse (?: … ) instead")
+}
+
+// try and detect rules matching newlines
+if (!options.lineBreaks && regexp.test('\n')) {
+throw new Error('Rule should declare lineBreaks: ' + regexp)
+}
+
+// store regex
+parts.push(reCapture(pat));
+}
+
+
+// If there's no fallback rule, use the sticky flag so we only look for
+// matches at the current index.
+//
+// If we don't support the sticky flag, then fake it using an irrefutable
+// match (i.e. an empty pattern).
+var fallbackRule = errorRule && errorRule.fallback;
+var flags = hasSticky && !fallbackRule ? 'ym' : 'gm';
+var suffix = hasSticky || fallbackRule ? '' : '|';
+
+if (unicodeFlag === true) flags += "u";
+var combined = new RegExp(reUnion(parts) + suffix, flags);
+return {regexp: combined, groups: groups, fast: fast, error: errorRule || defaultErrorRule}
+}
+
+function compile(rules) {
+var result = compileRules(toRules(rules));
+return new Lexer({start: result}, 'start')
+}
+
+function checkStateGroup(g, name, map) {
+var state = g && (g.push || g.next);
+if (state && !map[state]) {
+throw new Error("Missing state '" + state + "' (in token '" + g.defaultType + "' of state '" + name + "')")
+}
+if (g && g.pop && +g.pop !== 1) {
+throw new Error("pop must be 1 (in token '" + g.defaultType + "' of state '" + name + "')")
+}
+}
+function compileStates(states, start) {
+var all = states.$all ? toRules(states.$all) : [];
+delete states.$all;
+
+var keys = Object.getOwnPropertyNames(states);
+if (!start) start = keys[0];
+
+var ruleMap = Object.create(null);
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+ruleMap[key] = toRules(states[key]).concat(all);
+}
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+var rules = ruleMap[key];
+var included = Object.create(null);
+for (var j = 0; j < rules.length; j++) {
+var rule = rules[j];
+if (!rule.include) continue
+var splice = [j, 1];
+if (rule.include !== key && !included[rule.include]) {
+included[rule.include] = true;
+var newRules = ruleMap[rule.include];
+if (!newRules) {
+throw new Error("Cannot include nonexistent state '" + rule.include + "' (in state '" + key + "')")
+}
+for (var k = 0; k < newRules.length; k++) {
+var newRule = newRules[k];
+if (rules.indexOf(newRule) !== -1) continue
+splice.push(newRule);
+}
+}
+rules.splice.apply(rules, splice);
+j--;
+}
+}
+
+var map = Object.create(null);
+for (var i = 0; i < keys.length; i++) {
+var key = keys[i];
+map[key] = compileRules(ruleMap[key], true);
+}
+
+for (var i = 0; i < keys.length; i++) {
+var name = keys[i];
+var state = map[name];
+var groups = state.groups;
+for (var j = 0; j < groups.length; j++) {
+checkStateGroup(groups[j], name, map);
+}
+var fastKeys = Object.getOwnPropertyNames(state.fast);
+for (var j = 0; j < fastKeys.length; j++) {
+checkStateGroup(state.fast[fastKeys[j]], name, map);
+}
+}
+
+return new Lexer(map, start)
+}
+
+function keywordTransform(map) {
+var reverseMap = Object.create(null);
+var byLength = Object.create(null);
+var types = Object.getOwnPropertyNames(map);
+for (var i = 0; i < types.length; i++) {
+var tokenType = types[i];
+var item = map[tokenType];
+var keywordList = Array.isArray(item) ? item : [item];
+keywordList.forEach(function(keyword) {
+(byLength[keyword.length] = byLength[keyword.length] || []).push(keyword);
+if (typeof keyword !== 'string') {
+throw new Error("keyword must be string (in keyword '" + tokenType + "')")
+}
+reverseMap[keyword] = tokenType;
+});
+}
+
+// fast string lookup
+// https://jsperf.com/string-lookups
+function str(x) { return JSON.stringify(x) }
+var source = '';
+source += 'switch (value.length) {\n';
+for (var length in byLength) {
+var keywords = byLength[length];
+source += 'case ' + length + ':\n';
+source += 'switch (value) {\n';
+keywords.forEach(function(keyword) {
+var tokenType = reverseMap[keyword];
+source += 'case ' + str(keyword) + ': return ' + str(tokenType) + '\n';
+});
+source += '}\n';
+}
+source += '}\n';
+return Function('value', source) // type
+}
+
+/***************************************************************************/
+
+var Lexer = function(states, state) {
+this.startState = state;
+this.states = states;
+this.buffer = '';
+this.stack = [];
+this.reset();
+};
+
+Lexer.prototype.reset = function(data, info) {
+this.buffer = data || '';
+this.index = 0;
+this.line = info ? info.line : 1;
+this.col = info ? info.col : 1;
+this.queuedToken = info ? info.queuedToken : null;
+this.queuedThrow = info ? info.queuedThrow : null;
+this.setState(info ? info.state : this.startState);
+this.stack = info && info.stack ? info.stack.slice() : [];
+return this
+};
+
+Lexer.prototype.save = function() {
+return {
+line: this.line,
+col: this.col,
+state: this.state,
+stack: this.stack.slice(),
+queuedToken: this.queuedToken,
+queuedThrow: this.queuedThrow,
+}
+};
+
+Lexer.prototype.setState = function(state) {
+if (!state || this.state === state) return
+this.state = state;
+var info = this.states[state];
+this.groups = info.groups;
+this.error = info.error;
+this.re = info.regexp;
+this.fast = info.fast;
+};
+
+Lexer.prototype.popState = function() {
+this.setState(this.stack.pop());
+};
+
+Lexer.prototype.pushState = function(state) {
+this.stack.push(this.state);
+this.setState(state);
+};
+
+var eat = hasSticky ? function(re, buffer) { // assume re is /y
+return re.exec(buffer)
+} : function(re, buffer) { // assume re is /g
+var match = re.exec(buffer);
+// will always match, since we used the |(?:) trick
+if (match[0].length === 0) {
+return null
+}
+return match
+};
+
+Lexer.prototype._getGroup = function(match) {
+var groupCount = this.groups.length;
+for (var i = 0; i < groupCount; i++) {
+if (match[i + 1] !== undefined) {
+return this.groups[i]
+}
+}
+throw new Error('Cannot find token type for matched text')
+};
+
+function tokenToString() {
+return this.value
+}
+
+Lexer.prototype.next = function() {
+var index = this.index;
+
+// If a fallback token matched, we don't need to re-run the RegExp
+if (this.queuedGroup) {
+var token = this._token(this.queuedGroup, this.queuedText, index);
+this.queuedGroup = null;
+this.queuedText = "";
+return token
+}
+
+var buffer = this.buffer;
+if (index === buffer.length) {
+return // EOF
+}
+
+// Fast matching for single characters
+var group = this.fast[buffer.charCodeAt(index)];
+if (group) {
+return this._token(group, buffer.charAt(index), index)
+}
+
+// Execute RegExp
+var re = this.re;
+re.lastIndex = index;
+var match = eat(re, buffer);
+
+// Error tokens match the remaining buffer
+var error = this.error;
+if (match == null) {
+return this._token(error, buffer.slice(index, buffer.length), index)
+}
+
+var group = this._getGroup(match);
+var text = match[0];
+
+if (error.fallback && match.index !== index) {
+this.queuedGroup = group;
+this.queuedText = text;
+
+// Fallback tokens contain the unmatched portion of the buffer
+return this._token(error, buffer.slice(index, match.index), index)
+}
+
+return this._token(group, text, index)
+};
+
+Lexer.prototype._token = function(group, text, offset) {
+// count line breaks
+var lineBreaks = 0;
+if (group.lineBreaks) {
+var matchNL = /\n/g;
+var nl = 1;
+if (text === '\n') {
+lineBreaks = 1;
+} else {
+while (matchNL.exec(text)) { lineBreaks++; nl = matchNL.lastIndex; }
+}
+}
+
+var token = {
+type: (typeof group.type === 'function' && group.type(text)) || group.defaultType,
+value: typeof group.value === 'function' ? group.value(text) : text,
+text: text,
+toString: tokenToString,
+offset: offset,
+lineBreaks: lineBreaks,
+line: this.line,
+col: this.col,
+};
+// nb. adding more props to token object will make V8 sad!
+
+var size = text.length;
+this.index += size;
+this.line += lineBreaks;
+if (lineBreaks !== 0) {
+this.col = size - nl + 1;
+} else {
+this.col += size;
+}
+
+// throw, if no rule with {error: true}
+if (group.shouldThrow) {
+throw new Error(this.formatError(token, "invalid syntax"))
+}
+
+if (group.pop) this.popState();
+else if (group.push) this.pushState(group.push);
+else if (group.next) this.setState(group.next);
+
+return token
+};
+
+if (typeof Symbol !== 'undefined' && Symbol.iterator) {
+var LexerIterator = function(lexer) {
+this.lexer = lexer;
+};
+
+LexerIterator.prototype.next = function() {
+var token = this.lexer.next();
+return {value: token, done: !token}
+};
+
+LexerIterator.prototype[Symbol.iterator] = function() {
+return this
+};
+
+Lexer.prototype[Symbol.iterator] = function() {
+return new LexerIterator(this)
+};
+}
+
+Lexer.prototype.formatError = function(token, message) {
+if (token == null) {
+// An undefined token indicates EOF
+var text = this.buffer.slice(this.index);
+var token = {
+text: text,
+offset: this.index,
+lineBreaks: text.indexOf('\n') === -1 ? 0 : 1,
+line: this.line,
+col: this.col,
+};
+}
+var start = Math.max(0, token.offset - token.col + 1);
+var eol = token.lineBreaks ? token.text.indexOf('\n') : token.text.length;
+var firstLine = this.buffer.substring(start, token.offset + eol);
+message += " at line " + token.line + " col " + token.col + ":\n\n";
+message += " " + firstLine + "\n";
+message += " " + Array(token.col).join(" ") + "^";
+return message
+};
+
+Lexer.prototype.clone = function() {
+return new Lexer(this.states, this.state)
+};
+
+Lexer.prototype.has = function(tokenType) {
+return true
+};
+
+
+return {
+compile: compile,
+states: compileStates,
+error: Object.freeze({error: true}),
+fallback: Object.freeze({fallback: true}),
+keywords: keywordTransform,
+}
+
+}));
+} (moo$1));
+
+const moo = moo$1.exports;
+
+
+const digit = `[0-9]`;
+const digit19 = `[1-9]`;
+const hexdig = `[0-9a-fA-F]`;
+
+// String
+const unescaped = `[\\x20-\\x21\\x23-\\x5b\\x5d-\\u{10ffff}]`;
+const escape = `\\\\`;
+const escaped = `${escape}(?:["\\/\\\\brfnt]|u${hexdig}{4})`;
+const char = `(?:${unescaped}|${escaped})`;
+const string = `"${char}*"`;
+
+// Number
+const int = `(?:0|${digit19}${digit}*)`;
+const frac = `\\.${digit}+`;
+const e = `[eE]`;
+const exp = `${e}[-+]?${digit}+`;
+const number = `-?${int}(?:${frac})?(?:${exp})?`;
+
+// Whitespace
+const whitespace = `(?:(?:\\r?\\n)|[ \\t])+`;
+
+var lexer = (json) => {
+const lexer = moo.states({
+main: {
+WS: { match: new RegExp(whitespace, "u"), lineBreaks: true },
+true: { match: "true", value: () => true },
+false: { match: "false", value: () => false },
+null: { match: "null", value: () => null },
+number: { match: new RegExp(number, "u"), value: parseFloat },
+string: { match: new RegExp(string, "u"), value: JSON.parse },
+"{": "{",
+"}": "}",
+"[": "[",
+"]": "]",
+":": ":",
+",": ",",
+error: moo.error
+}
+});
+lexer.reset(json);
+
+const _next = () => {
+let token;
+do {
+token = lexer.next();
+if (token?.type === "error") {
+throw SyntaxError(lexer.formatError(token, "Unrecognized token"));
+}
+} while (token?.type === "WS");
+
+return token;
+};
+
+let previous;
+let nextToken = _next();
+
+const next = (expectedType = undefined) => {
+previous = nextToken;
+nextToken = _next();
+if (expectedType && previous?.type !== expectedType) {
+throw SyntaxError(lexer.formatError(previous, `Expected a '${expectedType}'`));
+}
+return previous;
+};
+
+const peek = () => nextToken;
+
+const defaultErrorToken = { offset: 0, line: 1, col: 0, text: "" };
+const syntaxError = (message) => {
+const referenceToken = previous || defaultErrorToken;
+const errorToken = {
+...referenceToken,
+offset: referenceToken.offset + referenceToken.text.length,
+col: referenceToken.col + referenceToken.text.length
+};
+throw new SyntaxError(lexer.formatError(errorToken, message));
+};
+
+return { next, peek, syntaxError };
+};
+
+const JsonPointer$2 = lib$4;
+const jsonLexer = lexer;
+
+
+const defaultReviver = (key, value) => value;
+const parse$3 = (json, reviver = defaultReviver) => {
+const lexer = jsonLexer(json);
+const value = parseValue(lexer, "", JsonPointer$2.nil, reviver);
+
+const token = lexer.peek();
+if (token) {
+lexer.syntaxError("A value has been parsed, but more tokens were found");
+}
+return value;
+};
+
+const parseValue = (lexer, key, pointer, reviver) => {
+let value;
+const token = lexer.next();
+switch (token?.type) {
+case "true":
+case "false":
+case "null":
+case "number":
+case "string":
+value = token.value;
+break;
+case "{":
+value = parseObject(lexer, key, pointer, reviver);
+break;
+case "[":
+value = parseArray(lexer, key, pointer, reviver);
+break;
+default:
+lexer.syntaxError("Expected a JSON value");
+}
+
+return reviver(key, value, pointer);
+};
+
+const parseObject = (lexer, key, pointer, reviver) => {
+const value = {};
+
+if (lexer.peek()?.type !== "}") {
+parseProperties(lexer, key, pointer, reviver, value);
+}
+
+lexer.next("}");
+
+return value;
+};
+
+const parseProperties = (lexer, key, pointer, reviver, value) => {
+const propertyName = lexer.next("string").value;
+lexer.next(":");
+if (!isValueToken(lexer.peek())) {
+lexer.syntaxError("Expected a JSON value");
+}
+value[propertyName] = parseValue(lexer, propertyName, JsonPointer$2.append(propertyName, pointer), reviver);
+
+if (lexer.peek()?.type === ",") {
+lexer.next(); // burn comma
+parseProperties(lexer, propertyName, pointer, reviver, value);
+} else if (isValueToken(lexer.peek())) {
+lexer.next(",");
+}
+};
+
+const parseArray = (lexer, key, pointer, reviver) => {
+const value = [];
+
+if (lexer.peek()?.type !== "]") {
+parseItems(lexer, 0, pointer, reviver, value);
+}
+
+lexer.next("]");
+
+return value;
+};
+
+const parseItems = (lexer, key, pointer, reviver, value) => {
+if (!isValueToken(lexer.peek())) {
+lexer.syntaxError("Expected a JSON value");
+}
+value[key] = parseValue(lexer, key, JsonPointer$2.append(key, pointer), reviver);
+if (lexer.peek()?.type === ",") {
+lexer.next(); // burn comma
+parseItems(lexer, key + 1, pointer, reviver, value);
+} else if (isValueToken(lexer.peek())) {
+lexer.next(",");
+}
+};
+
+const valueType = new Set(["string", "number", "true", "false", "null", "[", "{"]);
+const isValueToken = (token) => valueType.has(token?.type);
+
+var parse_1 = parse$3;
+
+const JsonPointer$1 = lib$4;
+
+
+const defaultReplacer = (key, value) => value;
+const stringify$2 = (value, replacer = defaultReplacer, space = "") => {
+return stringifyValue(value, replacer, space, "", JsonPointer$1.nil, 1);
+};
+
+const stringifyValue = (value, replacer, space, key, pointer, depth) => {
+value = replacer(key, value, pointer);
+let result;
+if (Array.isArray(value)) {
+result = stringifyArray(value, replacer, space, pointer, depth);
+} else if (typeof value === "object" && value !== null) {
+result = stringifyObject(value, replacer, space, pointer, depth);
+} else {
+result = JSON.stringify(value);
+}
+
+return result;
+};
+
+const stringifyArray = (value, replacer, space, pointer, depth) => {
+if (value.length === 0) {
+space = "";
+}
+const padding = space ? `\n${space.repeat(depth - 1)}` : "";
+return "[" + padding + space + value
+.map((item, index) => {
+const indexPointer = JsonPointer$1.append(index, pointer);
+return stringifyValue(item, replacer, space, index, indexPointer, depth + 1);
+})
+.join(`,${padding}${space}`) + padding + "]";
+};
+
+const stringifyObject = (value, replacer, space, pointer, depth) => {
+if (Object.keys(value).length === 0) {
+space = "";
+}
+const padding = space ? `\n${space.repeat(depth - 1)}` : "";
+const spacing = space ? " " : "";
+return "{" + padding + space + Object.entries(value)
+.map(([key, value]) => {
+const keyPointer = JsonPointer$1.append(key, pointer);
+return JSON.stringify(key) + ":" + spacing + stringifyValue(value, replacer, space, key, keyPointer, depth + 1);
+})
+.join(`,${padding}${space}`) + padding + "}";
+};
+
+var stringify_1 = stringify$2;
+
+const parse$2 = parse_1;
+const stringify$1 = stringify_1;
+
+
+var lib$2 = { parse: parse$2, stringify: stringify$1 };
+
 var fetch_browser = fetch;

 var contentType = {};
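Note: lib$2 above mirrors JSON.parse/JSON.stringify, except that the reviver and replacer receive the JSON Pointer of the current value as a third argument. A rough usage sketch, using the bundle's internal names for illustration only:

  const Json = lib$2; // { parse, stringify } as defined above

  // The reviver is called bottom-up with (key, value, pointer) for every value.
  Json.parse(`{"a": {"b": [1, 2]}}`, (key, value, pointer) => {
    // pointer takes the values "/a/b/0", "/a/b/1", "/a/b", "/a", "" in that order
    return value;
  });

  // The replacer also receives the pointer, which is what the reworked toSchema
  // further down uses to re-insert anchor keywords at specific schema locations.
  Json.stringify({ a: { b: 42 } }, (key, value, pointer) => {
    return pointer === "/a/b" ? value + 1 : value; // => '{"a":{"b":43}}'
  });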
@@ -2492,8 +3333,9 @@
 var mediaTypes = { addPlugin, parse, getContentType };

 const curry$1 = justCurryIt$1;
-const Pact$a = lib$
-const
+const Pact$a = lib$3;
+const Json = lib$2;
+const JsonPointer = lib$4;
 const { jsonTypeOf, resolveUrl: resolveUrl$1, urlFragment, pathRelative } = common$1;
 const fetch$1 = fetch_browser;
 const Reference$1 = reference;
@@ -2775,36 +3617,43 @@
 const toSchema = (schemaDoc, options = {}) => {
 const fullOptions = { ...toSchemaDefaultOptions, ...options };

-const
-
-
+const anchorToken = getConfig(schemaDoc.dialectId, "anchorToken");
+const dynamicAnchorToken = getConfig(schemaDoc.dialectId, "dynamicAnchorToken");
+
+const anchors = {};
+for (const anchor in schemaDoc.anchors) {
+if (anchor !== "" && !schemaDoc.dynamicAnchors[anchor]) {
+anchors[schemaDoc.anchors[anchor]] = anchor;
 }
+}
+
+const dynamicAnchors = {};
+for (const anchor in schemaDoc.dynamicAnchors) {
+const pointer = urlFragment(schemaDoc.dynamicAnchors[anchor]);
+dynamicAnchors[pointer] = anchor;
+}

-
-
-
-
-
+const schema = JSON.parse(Json.stringify(schemaDoc.schema, (key, value, pointer) => {
+if (Reference$1.isReference(value)) {
+const refValue = Reference$1.value(value);
+const embeddedDialect = typeof refValue.$schema === "string" ? resolveUrl$1(refValue.$schema, "") : schemaDoc.dialectId;
+const embeddedToken = getConfig(embeddedDialect, "embeddedToken");
+if (!fullOptions.includeEmbedded && embeddedToken in refValue) {
+return;
+} else {
+return Reference$1.value(value);
+}
 } else {
-
+if (pointer in anchors) {
+value = { [anchorToken]: anchors[pointer], ...value };
+}
+if (pointer in dynamicAnchors) {
+value = { [dynamicAnchorToken]: dynamicAnchors[pointer], ...value };
+}
+return value;
 }
 }));

-const dynamicAnchorToken = getConfig(schemaDoc.dialectId, "dynamicAnchorToken");
-Object.entries(schemaDoc.dynamicAnchors)
-.forEach(([anchor, uri]) => {
-const pointer = JsonPointer.append(dynamicAnchorToken, urlFragment(uri));
-JsonPointer.assign(pointer, schema, anchor);
-});
-
-const anchorToken = getConfig(schemaDoc.dialectId, "anchorToken");
-Object.entries(schemaDoc.anchors)
-.filter(([anchor]) => anchor !== "" && !(anchor in schemaDoc.dynamicAnchors))
-.forEach(([anchor, pointer]) => {
-const anchorPointer = JsonPointer.append(anchorToken, pointer);
-JsonPointer.assign(anchorPointer, schema, anchor);
-});
-
 const baseToken = getConfig(schemaDoc.dialectId, "baseToken");
 const id = relativeUri(fullOptions.parentId, schemaDoc.id);
 const dialect = fullOptions.parentDialect === schemaDoc.dialectId ? "" : schemaDoc.dialectId;
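Note: the net effect of this hunk is that toSchema now injects the anchor keywords while the schema is serialized, keyed by JSON Pointer, instead of patching the parsed copy afterwards with JsonPointer.assign. A simplified, illustrative sketch of that pattern (the token and pointers are example values, not taken from the diff):

  const Json = lib$2;                      // pointer-aware stringify added above
  const anchorToken = "$anchor";           // normally getConfig(dialectId, "anchorToken")
  const anchors = { "/$defs/foo": "foo" }; // inverted table: JSON Pointer -> anchor name
  const doc = { $defs: { foo: { type: "string" } } };

  const out = JSON.parse(Json.stringify(doc, (key, value, pointer) => {
    return pointer in anchors
      ? { [anchorToken]: anchors[pointer], ...value } // prepend the anchor keyword here
      : value;
  }));
  // out.$defs.foo => { "$anchor": "foo", "type": "string" }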
@@ -3055,7 +3904,7 @@
 addMediaTypePlugin: MediaTypes.addPlugin
 };

-const Pact$9 = lib$
+const Pact$9 = lib$3;
 const PubSub = pubsub.exports;
 const Core$x = core$2;
 const Instance$B = instance;
@@ -3286,7 +4135,7 @@
 var additionalProperties6 = { compile: compile$G, interpret: interpret$G, collectEvaluatedProperties: collectEvaluatedProperties$c };

 const { Core: Core$r, Schema: Schema$H } = lib$1;
-const Pact$8 = lib$
+const Pact$8 = lib$3;


 const compile$F = (schema, ast) => Pact$8.pipeline([
@@ -3315,7 +4164,7 @@
 var allOf = { compile: compile$F, interpret: interpret$F, collectEvaluatedProperties: collectEvaluatedProperties$b, collectEvaluatedItems: collectEvaluatedItems$c };

 const { Core: Core$q, Schema: Schema$G } = lib$1;
-const Pact$7 = lib$
+const Pact$7 = lib$3;


 const compile$E = (schema, ast) => Pact$7.pipeline([
@@ -3458,7 +4307,7 @@
 var containsMinContainsMaxContains = { compile: compile$B, interpret: interpret$B, collectEvaluatedItems: collectEvaluatedItems$a };

 const { Core: Core$n, Schema: Schema$D } = lib$1;
-const Pact$6 = lib$
+const Pact$6 = lib$3;


 const compile$A = async (schema, ast) => {
@@ -3474,7 +4323,7 @@
 var definitions = { compile: compile$A, interpret: interpret$A };

 const { Core: Core$m, Schema: Schema$C, Instance: Instance$s } = lib$1;
-const Pact$5 = lib$
+const Pact$5 = lib$3;


 const compile$z = (schema, ast) => Pact$5.pipeline([
@@ -3504,7 +4353,7 @@
 var dependencies = { compile: compile$z, interpret: interpret$z };

 const { Schema: Schema$B, Instance: Instance$r } = lib$1;
-const Pact$4 = lib$
+const Pact$4 = lib$3;


 const compile$y = (schema) => Pact$4.pipeline([
@@ -3524,7 +4373,7 @@
 var dependentRequired = { compile: compile$y, interpret: interpret$y };

 const { Core: Core$l, Schema: Schema$A, Instance: Instance$q } = lib$1;
-const Pact$3 = lib$
+const Pact$3 = lib$3;


 const compile$x = (schema, ast) => Pact$3.pipeline([
@@ -3933,7 +4782,7 @@
 var pattern = { compile: compile$b, interpret: interpret$b };

 const { Core: Core$d, Schema: Schema$f, Instance: Instance$8 } = lib$1;
-const Pact$2 = lib$
+const Pact$2 = lib$3;


 const compile$a = (schema, ast) => Pact$2.pipeline([
@@ -3971,7 +4820,7 @@
 var common = { isObject, escapeRegExp: escapeRegExp$1, splitUrl: splitUrl$1 };

 const { Core: Core$c, Schema: Schema$e, Instance: Instance$7 } = lib$1;
-const Pact$1 = lib$
+const Pact$1 = lib$3;
 const { escapeRegExp } = common;


@@ -4059,7 +4908,7 @@
 var required = { compile: compile$5, interpret: interpret$5 };

 const { Core: Core$8, Schema: Schema$a, Instance: Instance$4 } = lib$1;
-const Pact = lib$
+const Pact = lib$3;


 const compile$4 = (schema, ast) => {