scon-notation 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +91 -0
- package/package.json +45 -0
- package/src/decoder.js +761 -0
- package/src/encoder.js +506 -0
- package/src/minifier.js +143 -0
- package/src/schema-registry.js +204 -0
- package/src/scon.js +121 -0
- package/src/tree-hash.js +227 -0
- package/src/validator.js +131 -0
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
// src/schema-registry.js
// Port of bX\Scon\SchemaRegistry — schema/response/security registration and resolution

export class SchemaRegistry {

  constructor() {
    this.schemas = {};
    this.responses = {};
    this.security = {};
    this.resolving = {}; // refKeys currently mid-resolution — cycle guard
  }

  /**
   * Register a definition under one of the three stores.
   * @param {'s'|'r'|'sec'} type - schema / response / security
   * @param {string} name
   * @param {*} definition
   * @throws {Error} on an unrecognized type
   */
  register(type, name, definition) {
    if (type === 's') {
      this.schemas[name] = definition;
    } else if (type === 'r') {
      this.responses[name] = definition;
    } else if (type === 'sec') {
      this.security[name] = definition;
    } else {
      throw new Error(`Unknown definition type: ${type}`);
    }
  }

  /**
   * Resolve a reference by type and name, deep-resolving nested @ref markers.
   * Circular references come back as JSON-Schema style { $ref } placeholders.
   * Fix: the cycle-guard entry is now cleared in a `finally`, so a resolution
   * that throws (e.g. a nested undefined reference) no longer leaves a stale
   * entry that would make later resolves of the same name return a bogus $ref.
   * @throws {Error} when the name is not registered
   */
  resolve(type, name) {
    const store = this._getStore(type);
    if (!Object.hasOwn(store, name)) {
      throw new Error(`Undefined reference: @${type}:${name}`);
    }

    const refKey = `${type}:${name}`;
    if (Object.hasOwn(this.resolving, refKey)) {
      // Already being resolved higher up the stack — break the cycle lazily.
      return { '$ref': `#/definitions/${name}` };
    }

    this.resolving[refKey] = true;
    try {
      return this._deepResolveRefs(store[name]);
    } finally {
      delete this.resolving[refKey];
    }
  }

  /**
   * Resolve a reference, then apply overrides:
   *  - keys prefixed with '-' remove the named field (dot paths supported)
   *  - dot-notation keys set nested values
   *  - plain-object values deep-merge into existing plain-object fields
   *  - anything else replaces the field outright
   */
  resolveWithOverride(type, name, overrides) {
    const base = this.resolve(type, name);

    // Partition overrides into removals ('-key') and merges.
    const removals = [];
    const merges = {};
    for (const [key, value] of Object.entries(overrides)) {
      if (key.startsWith('-')) {
        removals.push(key.slice(1));
      } else {
        merges[key] = value;
      }
    }

    for (const field of removals) {
      if (field.includes('.')) {
        this._unsetDotPath(base, field);
      } else {
        delete base[field];
      }
    }

    for (const [key, value] of Object.entries(merges)) {
      if (key.includes('.')) {
        this._setDotPath(base, key, value);
      } else if (this._isPlainObject(value) && Object.hasOwn(base, key) && this._isPlainObject(base[key])) {
        base[key] = this._deepMerge(base[key], value);
      } else {
        base[key] = value;
      }
    }

    return base;
  }

  // Resolve a polymorphic (pipe-separated) reference list into a oneOf union.
  resolvePolymorphic(refs) {
    return { oneOf: refs.map((ref) => this.resolve(ref.type, ref.name)) };
  }

  // True when `name` is registered under `type`; unknown types yield false.
  has(type, name) {
    try {
      return Object.hasOwn(this._getStore(type), name);
    } catch {
      return false;
    }
  }

  // All definitions of a type, for encoding (empty object for unknown types).
  getAll(type) {
    try {
      return this._getStore(type);
    } catch {
      return {};
    }
  }

  // Drop every registered definition and clear the cycle guard.
  reset() {
    this.schemas = {};
    this.responses = {};
    this.security = {};
    this.resolving = {};
  }

  // --- Private ---

  // Map a ref type to its backing store.
  _getStore(type) {
    switch (type) {
      case 's': return this.schemas;
      case 'r': return this.responses;
      case 'sec': return this.security;
      default: throw new Error(`Unknown ref type: ${type}`);
    }
  }

  // Non-null, non-array object check shared by the merge/resolve logic.
  _isPlainObject(value) {
    return value !== null && typeof value === 'object' && !Array.isArray(value);
  }

  // Recursively copy `data`, replacing @ref / @polymorphic markers with their
  // resolved definitions. Primitives are returned as-is.
  _deepResolveRefs(data) {
    if (data === null || typeof data !== 'object') return data;
    if (Array.isArray(data)) return data.map((item) => this._deepResolveRefs(item));

    const out = {};
    for (const [key, val] of Object.entries(data)) {
      if (!this._isPlainObject(val)) {
        out[key] = val;
      } else if ('@ref' in val) {
        const ref = val['@ref'];
        out[key] = '@overrides' in val
          ? this.resolveWithOverride(ref.type, ref.name, val['@overrides'])
          : this.resolve(ref.type, ref.name);
      } else if ('@polymorphic' in val) {
        out[key] = this.resolvePolymorphic(val['@polymorphic']);
      } else {
        out[key] = this._deepResolveRefs(val);
      }
    }
    return out;
  }

  // Assign `val` at a dot-notation path (a.b.c), creating intermediate
  // objects as needed; non-object intermediates are overwritten with {}.
  _setDotPath(obj, path, val) {
    const segments = path.split('.');
    const last = segments.pop();
    let cursor = obj;
    for (const seg of segments) {
      if (!Object.hasOwn(cursor, seg) || typeof cursor[seg] !== 'object' || cursor[seg] === null) {
        cursor[seg] = {};
      }
      cursor = cursor[seg];
    }
    cursor[last] = val;
  }

  // Delete the field at a dot-notation path; silently stops if any
  // intermediate is missing or not an object.
  _unsetDotPath(obj, path) {
    const segments = path.split('.');
    const last = segments.pop();
    let cursor = obj;
    for (const seg of segments) {
      if (!Object.hasOwn(cursor, seg) || typeof cursor[seg] !== 'object' || cursor[seg] === null) {
        return;
      }
      cursor = cursor[seg];
    }
    delete cursor[last];
  }

  // Deep merge: plain objects merge recursively; arrays and scalars replace.
  _deepMerge(base, override) {
    const merged = { ...base };
    for (const [key, val] of Object.entries(override)) {
      merged[key] = this._isPlainObject(val) && Object.hasOwn(merged, key) && this._isPlainObject(merged[key])
        ? this._deepMerge(merged[key], val)
        : val;
    }
    return merged;
  }
}
|
package/src/scon.js
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
// src/scon.js
|
|
2
|
+
// S.C.O.N. — Schema-Compact Object Notation
|
|
3
|
+
// Extension: .scon | Content-Type: text/scon; charset=utf-8
|
|
4
|
+
//
|
|
5
|
+
// WASM-accelerated: loads Rust tape decoder via WebAssembly when available,
|
|
6
|
+
// falls back to pure JS implementation transparently.
|
|
7
|
+
//
|
|
8
|
+
// Performance comparison (OpenAPI 3.1 spec, 71 endpoints):
|
|
9
|
+
//
|
|
10
|
+
// Format | Bytes | Ratio | Gzip |
|
|
11
|
+
// -----------------|---------|-------|---------|
|
|
12
|
+
// JSON | 90,886 | 1.00x | 4,632 |
|
|
13
|
+
// SCON | 26,347 | 0.29x | 3,969 |
|
|
14
|
+
// SCON (minified) | 20,211 | 0.22x | 3,818 |
|
|
15
|
+
//
|
|
16
|
+
// SCON achieves ~71% reduction vs JSON by extracting repeated schema
|
|
17
|
+
// definitions (s:, r:, sec:) and referencing them (@s:).
|
|
18
|
+
|
|
19
|
+
import { Encoder } from './encoder.js';
|
|
20
|
+
import { Decoder } from './decoder.js';
|
|
21
|
+
import { Minifier } from './minifier.js';
|
|
22
|
+
import { Validator } from './validator.js';
|
|
23
|
+
|
|
24
|
+
// WASM module — lazy-loaded; stays null when the optional 'scon-wasm'
// package is unavailable, in which case the pure-JS fallbacks are used.
let wasmModule = null;
// Promise guarding the one-time WASM initialization (see ensureWasm()).
let wasmInitPromise = null;
|
|
27
|
+
|
|
28
|
+
// Attempt to load the optional 'scon-wasm' accelerator. Any failure —
// package absent, init error — leaves wasmModule null so every operation
// transparently falls back to the pure-JS implementation.
async function loadWasm() {
  try {
    const mod = await import('scon-wasm');
    // wasm-bindgen builds expose their init routine as the default export.
    if (typeof mod.default === 'function') {
      await mod.default();
    }
    wasmModule = mod;
  } catch {
    wasmModule = null;
  }
}
|
|
39
|
+
|
|
40
|
+
// Start the WASM load at most once and hand back the shared promise.
function ensureWasm() {
  wasmInitPromise ??= loadWasm();
  return wasmInitPromise;
}
|
|
47
|
+
|
|
48
|
+
// Kick off the WASM load at import time (non-blocking; failures are caught
// inside loadWasm and leave the pure-JS fallback active).
ensureWasm();
|
|
50
|
+
|
|
51
|
+
// Encode JS data to a SCON string.
// Returns a string synchronously, or Promise<string> when options.autoExtract is set.
function encode(data, options = {}) {
  const usesJsOnlyFeatures =
    options.autoExtract || options.schemas || options.responses || options.security;

  // WASM fast path — definition extraction (s:/r:/sec:) is implemented in JS only.
  if (wasmModule && !usesJsOnlyFeatures) {
    const indent = options.indent ?? 1;
    if (indent > 1) {
      return wasmModule.scon_encode_indent(data, indent);
    }
    return wasmModule.scon_encode(data);
  }

  const encoder = new Encoder(options);
  return encoder.encode(
    data,
    options.schemas || {},
    options.responses || {},
    options.security || {},
  );
}
|
|
67
|
+
|
|
68
|
+
// Decode a SCON string into a JS value.
function decode(sconString, options = {}) {
  const hasRegistryOptions = options.schemas || options.responses || options.security;
  if (wasmModule && !hasRegistryOptions) {
    // Single boundary crossing: WASM turns the tape into a JSON string,
    // then V8's native JSON.parse builds the object graph.
    return JSON.parse(wasmModule.scon_to_json(sconString));
  }
  return new Decoder(options).decode(sconString);
}
|
|
77
|
+
|
|
78
|
+
// Collapse a SCON document onto a single line.
function minify(sconString) {
  return wasmModule
    ? wasmModule.scon_minify(sconString)
    : Minifier.minify(sconString);
}
|
|
85
|
+
|
|
86
|
+
// Re-indent a minified SCON document.
function expand(minifiedString, options = {}) {
  const indent = options.indent ?? 1;
  return wasmModule
    ? wasmModule.scon_expand(minifiedString, indent)
    : Minifier.expand(minifiedString, indent);
}
|
|
94
|
+
|
|
95
|
+
// Validate SCON data against configured rules (pure JS; no WASM path exists).
function validate(data, options = {}) {
  return new Validator(options).validate(data);
}
|
|
100
|
+
|
|
101
|
+
// Resolve once WASM loading has settled; true when acceleration is active.
// Optional — every operation works without it via the pure-JS fallback.
async function ready() {
  await ensureWasm();
  return Boolean(wasmModule);
}
|
|
106
|
+
|
|
107
|
+
// Public API surface — frozen so consumers cannot patch methods at runtime.
const SCON = Object.freeze({
  encode,
  decode,
  minify,
  expand,
  validate,
  ready,
  // JSON-style aliases (SCON.parse / SCON.stringify)
  parse: decode,
  stringify: encode,
});

export default SCON;
export { SCON, Encoder, Decoder, Minifier, Validator };
export { SchemaRegistry } from './schema-registry.js';
|
package/src/tree-hash.js
ADDED
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
// src/tree-hash.js
|
|
2
|
+
// Port of bX\TreeHash — structural hashing via xxh128 (hash-wasm)
|
|
3
|
+
|
|
4
|
+
import { createXXHash128 } from 'hash-wasm';
|
|
5
|
+
|
|
6
|
+
// Shared hash-wasm xxh128 instance; null until getHasher() first resolves.
let hasherInstance = null;
|
|
7
|
+
|
|
8
|
+
// Lazily create the shared hash-wasm xxh128 instance (at most one per module).
async function getHasher() {
  hasherInstance ??= await createXXHash128();
  return hasherInstance;
}
|
|
15
|
+
|
|
16
|
+
// xxh128 hex digest of `str` — async, ensures the hasher exists first.
async function xxh128(str) {
  const hasher = await getHasher();
  hasher.init();
  hasher.update(str);
  return hasher.digest('hex');
}
|
|
23
|
+
|
|
24
|
+
// Synchronous xxh128 — valid only after getHasher() has resolved at least
// once; used by the fingerprint path where awaiting per node would be costly.
function xxh128Sync(str) {
  if (hasherInstance === null) {
    throw new Error('TreeHash not initialized. Call await TreeHash.hash() first or await TreeHash.init()');
  }
  hasherInstance.init();
  hasherInstance.update(str);
  return hasherInstance.digest('hex');
}
|
|
33
|
+
|
|
34
|
+
// True when `arr` is a dense array (no holes); non-arrays are false and
// empty arrays are trivially sequential. The `in` check is what detects
// holes — iteration methods would silently skip them.
function isSequential(arr) {
  if (!Array.isArray(arr)) return false;
  for (let idx = arr.length - 1; idx >= 0; idx--) {
    if (!(idx in arr)) return false;
  }
  return true;
}
|
|
42
|
+
|
|
43
|
+
// Recursively sort object keys (PHP ksort-style) for stable, order-independent
// serialization. Arrays keep their element order; their object items are
// sorted recursively. Primitives pass through unchanged.
function recursiveKsort(data) {
  if (Array.isArray(data)) {
    return data.map((item) =>
      typeof item === 'object' && item !== null ? recursiveKsort(item) : item,
    );
  }
  if (typeof data === 'object' && data !== null) {
    const sorted = {};
    for (const key of Object.keys(data).sort()) {
      sorted[key] = recursiveKsort(data[key]);
    }
    return sorted;
  }
  return data;
}
|
|
60
|
+
|
|
61
|
+
// Record `obj` in the dedup index, keyed by the xxh128 of its JSON form,
// but only when it has at least `minKeys` own keys. Repeated structures
// bump `count` on the first occurrence's entry.
function indexCandidate(obj, objPath, index, minKeys) {
  if (Object.keys(obj).length < minKeys) return;
  const hash = xxh128Sync(JSON.stringify(obj));
  if (Object.hasOwn(index, hash)) {
    index[hash].count++;
  } else {
    index[hash] = { count: 1, path: objPath, data: obj };
  }
}

// Walk `data`, indexing every plain sub-object (including objects found
// inside arrays) by structural hash so repeated subtrees can be deduplicated.
// Paths use dot notation; array elements appear as `parent.[i]`.
// (Previously the hash-and-index logic was duplicated verbatim in the array
// and object branches; it is now shared via indexCandidate.)
function collectHashesHybrid(data, path, index, minKeys) {
  for (const [key, val] of Object.entries(data)) {
    if (val === null || typeof val !== 'object') continue;

    const childPath = path === '' ? String(key) : `${path}.${key}`;

    if (Array.isArray(val)) {
      // Arrays themselves are not indexed; traverse them for object items.
      for (let i = 0; i < val.length; i++) {
        const item = val[i];
        if (item !== null && typeof item === 'object' && !Array.isArray(item)) {
          const itemPath = `${childPath}.[${i}]`;
          indexCandidate(item, itemPath, index, minKeys);
          collectHashesHybrid(item, itemPath, index, minKeys);
        }
      }
    } else {
      // Associative object: index it, then recurse into it.
      indexCandidate(val, childPath, index, minKeys);
      collectHashesHybrid(val, childPath, index, minKeys);
    }
  }
}
|
|
102
|
+
|
|
103
|
+
// --- Fingerprint internals (sync, for equals/diff) ---

// Fingerprint a primitive as a tagged string. Tag bytes keep types distinct:
// \x00 null, \x01 boolean, \x02 integer, \x03 float, \x04 string/other.
function primitiveFP(val) {
  if (val === null) return '\x00';
  if (typeof val === 'boolean') return val ? '\x01\x01' : '\x01\x00';
  if (typeof val === 'number') {
    const tag = Number.isInteger(val) ? '\x02' : '\x03';
    return tag + String(val);
  }
  // Strings, and anything else, share the \x04 tag via string coercion.
  return '\x04' + String(val);
}
|
|
116
|
+
|
|
117
|
+
// Structural fingerprint: primitives return tagged strings (see primitiveFP),
// containers return a 32-char xxh128 hex digest over their tagged, recursively
// fingerprinted contents. Requires the hasher to be initialized (xxh128Sync).
function fingerprint(data) {
  if (data === null || typeof data !== 'object') {
    return primitiveFP(data);
  }

  if (Array.isArray(data)) {
    if (data.length === 0) return xxh128Sync('A:0');
    const parts = data.map(fingerprint);
    return xxh128Sync(`A:${data.length}|${parts.join('|')}`);
  }

  // Plain object: keys are sorted for order-independence.
  const keys = Object.keys(data).sort();
  // NOTE(review): empty objects hash identically to empty arrays ('A:0') —
  // presumably deliberate PHP-port parity (empty PHP arrays are ambiguous);
  // confirm before changing.
  if (keys.length === 0) return xxh128Sync('A:0');
  const parts = keys.map((key) => `${key}:${fingerprint(data[key])}`);
  return xxh128Sync(`O:${keys.length}|${parts.join('|')}`);
}
|
|
138
|
+
|
|
139
|
+
export class TreeHash {

  // Eagerly initialize hash-wasm; optional, since every entry point lazy-inits.
  static async init() {
    await getHasher();
  }

  // Hash any value to a 32-char xxh128 hex string.
  static async hash(data) {
    await getHasher(); // ensure the sync path below is usable
    const fp = fingerprint(data);
    // Containers already come back as a 32-char hex digest; only primitive
    // fingerprints (tagged, non-hex) need one more hashing pass.
    return fp.length === 32 && /^[0-9a-f]+$/.test(fp) ? fp : xxh128Sync(fp);
  }

  // Hash a tree and build a frequency index of repeated sub-objects for dedup.
  // normalize=true applies recursive key sorting so sources with different
  // key order dedup against each other.
  static async hashTree(data, basePath = '', minKeys = 2, normalize = true) {
    await getHasher(); // ensure the sync hashing used during collection works

    const tree = normalize ? recursiveKsort(data) : data;

    const index = {};
    collectHashesHybrid(tree, basePath, index, minKeys);

    const rootHash = await xxh128(JSON.stringify(tree));
    return { root_hash: rootHash, index };
  }

  // Structural equality via fingerprints (async only to guarantee init).
  static async equals(a, b) {
    if (a === b) return true;
    await getHasher();
    return fingerprint(a) === fingerprint(b);
  }

  // Structural diff: list of paths where the two values differ.
  static async diff(a, b, path = '') {
    await getHasher();
    return TreeHash._diffSync(a, b, path);
  }

  static _diffSync(a, b, path) {
    if (a === b || fingerprint(a) === fingerprint(b)) return [];

    const isPlainObj = (v) => typeof v === 'object' && v !== null && !Array.isArray(v);

    // Anything that is not a pair of plain objects is reported wholesale.
    if (!isPlainObj(a) || !isPlainObj(b)) {
      return [{ path, type: 'changed', old: a, new: b }];
    }

    const diffs = [];
    for (const key of new Set([...Object.keys(a), ...Object.keys(b)])) {
      const currentPath = path === '' ? String(key) : `${path}.${key}`;

      if (!(key in a)) {
        diffs.push({ path: currentPath, type: 'added', value: b[key] });
        continue;
      }
      if (!(key in b)) {
        diffs.push({ path: currentPath, type: 'removed', value: a[key] });
        continue;
      }

      const valA = a[key];
      const valB = b[key];
      if (valA === valB || fingerprint(valA) === fingerprint(valB)) continue;

      if (isPlainObj(valA) && isPlainObj(valB)) {
        diffs.push(...TreeHash._diffSync(valA, valB, currentPath));
      } else {
        diffs.push({ path: currentPath, type: 'changed', old: valA, new: valB });
      }
    }

    return diffs;
  }
}
|
package/src/validator.js
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
// src/validator.js
|
|
2
|
+
// Port of bX\Scon\Validator — SCON data validation
|
|
3
|
+
|
|
4
|
+
// Spec rulesets applied when `enforce` is configured on the Validator.
// `required` lists top-level fields; '<parent>.required' lists fields that
// must exist inside the object-valued top-level field `<parent>`.
// Frozen (matching the file's SCON export convention) so this shared
// module-level constant cannot be mutated at runtime.
const ENFORCE_RULES = Object.freeze({
  'openapi:3.1': Object.freeze({
    required: Object.freeze(['openapi', 'info', 'paths']),
    'info.required': Object.freeze(['title', 'version']),
  }),
  'openapi:3.0': Object.freeze({
    required: Object.freeze(['openapi', 'info', 'paths']),
    'info.required': Object.freeze(['title', 'version']),
  }),
});
|
|
14
|
+
|
|
15
|
+
export class Validator {

  /**
   * @param {object} [options]
   * @param {'warn'|'strict'} [options.mode='warn'] - 'strict' records issues
   *   as errors (fails validation); 'warn' records them as warnings.
   * @param {?string} [options.enforce=null] - key into ENFORCE_RULES
   *   (e.g. 'openapi:3.1'); when set, issues are always treated as errors.
   */
  constructor(options = {}) {
    this.mode = options.mode || 'warn';
    this.enforce = options.enforce || null;
    this.warnings = [];
    this.errors = [];
  }

  /**
   * Validate a decoded SCON value.
   * @returns {{valid: boolean, warnings: string[], errors: string[], mode: string, enforce: ?string}}
   */
  validate(data) {
    this.warnings = [];
    this.errors = [];

    if (typeof data !== 'object' || data === null) {
      this._addIssue('Root value must be an object or array');
      return this._result();
    }

    // Apply spec enforcement rules when configured and recognized.
    if (this.enforce !== null && this.enforce in ENFORCE_RULES) {
      this._enforceSpec(data, ENFORCE_RULES[this.enforce]);
    }

    return this._result();
  }

  // Validate a registered schema definition (currently: non-empty check only).
  validateSchema(name, schema) {
    this.warnings = [];
    this.errors = [];

    if (!schema || Object.keys(schema).length === 0) {
      this._addIssue(`Schema '${name}' is empty`);
    }

    return this._result();
  }

  /**
   * Validate data against a schema (field presence only).
   * Schema keys ending in '!' mark required fields; a null value counts as
   * missing (mirrors PHP isset() in the original port).
   * Fixes: field paths no longer carry a spurious leading '.' when called
   * with an empty `path`, and required-field messages report the clean key
   * (without the '!' marker), matching the extra-field messages.
   */
  validateAgainstSchema(data, schema, path = '') {
    this.warnings = [];
    this.errors = [];

    const knownKeys = new Set();
    for (const key of Object.keys(schema)) {
      const isRequired = key.endsWith('!');
      const cleanKey = isRequired ? key.slice(0, -1) : key;
      knownKeys.add(cleanKey);

      if (isRequired && (data[cleanKey] === undefined || data[cleanKey] === null)) {
        const fieldPath = path ? `${path}.${cleanKey}` : cleanKey;
        this._addIssue(`Missing required field: ${fieldPath}`);
      }
    }

    // Report fields present in the data but absent from the schema:
    // errors in strict mode, warnings in warn mode.
    for (const dataKey of Object.keys(data)) {
      if (knownKeys.has(dataKey)) continue;
      const extraPath = path ? `${path}.${dataKey}` : dataKey;
      if (this.mode === 'strict') {
        this._addIssue(`Extra field not in schema: ${extraPath}`);
      } else if (this.mode === 'warn') {
        this.warnings.push(`Extra field: ${extraPath}`);
      }
    }

    return this._result();
  }

  // Apply one ENFORCE_RULES entry: top-level `required` plus nested
  // '<parent>.required' field lists (parent must be an object to be checked).
  _enforceSpec(data, rules) {
    if (rules.required) {
      for (const field of rules.required) {
        if (data[field] === undefined || data[field] === null) {
          this._addIssue(`Missing required field per ${this.enforce}: ${field}`);
        }
      }
    }

    for (const [ruleKey, ruleFields] of Object.entries(rules)) {
      if (ruleKey === 'required' || !ruleKey.endsWith('.required')) continue;
      const parentKey = ruleKey.slice(0, -'.required'.length);
      const parent = data[parentKey];
      if (parent === undefined || parent === null || typeof parent !== 'object') continue;
      for (const field of ruleFields) {
        if (parent[field] === undefined || parent[field] === null) {
          this._addIssue(`Missing required field per ${this.enforce}: ${parentKey}.${field}`);
        }
      }
    }
  }

  // Route an issue: errors under strict mode or spec enforcement, warnings
  // under warn mode; silently dropped for any other mode value.
  _addIssue(msg) {
    if (this.mode === 'strict' || this.enforce !== null) {
      this.errors.push(msg);
    } else if (this.mode === 'warn') {
      this.warnings.push(msg);
    }
  }

  // Snapshot the accumulated issues into a result object.
  _result() {
    return {
      valid: this.errors.length === 0,
      warnings: this.warnings,
      errors: this.errors,
      mode: this.mode,
      enforce: this.enforce,
    };
  }
}
|