@unrdf/hooks 5.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +86 -0
- package/package.json +70 -0
- package/src/hooks/builtin-hooks.mjs +296 -0
- package/src/hooks/condition-cache.mjs +109 -0
- package/src/hooks/condition-evaluator.mjs +722 -0
- package/src/hooks/define-hook.mjs +211 -0
- package/src/hooks/effect-sandbox-worker.mjs +170 -0
- package/src/hooks/effect-sandbox.mjs +517 -0
- package/src/hooks/file-resolver.mjs +387 -0
- package/src/hooks/hook-chain-compiler.mjs +236 -0
- package/src/hooks/hook-executor-batching.mjs +277 -0
- package/src/hooks/hook-executor.mjs +465 -0
- package/src/hooks/hook-management.mjs +202 -0
- package/src/hooks/hook-scheduler.mjs +413 -0
- package/src/hooks/knowledge-hook-engine.mjs +358 -0
- package/src/hooks/knowledge-hook-manager.mjs +269 -0
- package/src/hooks/observability.mjs +531 -0
- package/src/hooks/policy-pack.mjs +572 -0
- package/src/hooks/quad-pool.mjs +249 -0
- package/src/hooks/quality-metrics.mjs +544 -0
- package/src/hooks/security/error-sanitizer.mjs +257 -0
- package/src/hooks/security/path-validator.mjs +194 -0
- package/src/hooks/security/sandbox-restrictions.mjs +331 -0
- package/src/hooks/telemetry.mjs +167 -0
- package/src/index.mjs +101 -0
- package/src/security/sandbox/browser-executor.mjs +220 -0
- package/src/security/sandbox/detector.mjs +342 -0
- package/src/security/sandbox/isolated-vm-executor.mjs +373 -0
- package/src/security/sandbox/vm2-executor.mjs +217 -0
- package/src/security/sandbox/worker-executor-runtime.mjs +74 -0
- package/src/security/sandbox/worker-executor.mjs +212 -0
- package/src/security/sandbox-adapter.mjs +141 -0
|
@@ -0,0 +1,387 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @file File URI resolver with content-addressed verification.
|
|
3
|
+
* @module file-resolver
|
|
4
|
+
*
|
|
5
|
+
* @description
|
|
6
|
+
* Production-ready file resolver that loads SPARQL/SHACL files from URIs
|
|
7
|
+
* with SHA-256 hash verification for content integrity and provenance.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { readFile } from 'fs/promises';
|
|
11
|
+
import { createHash } from 'crypto';
|
|
12
|
+
import { _fileURLToPath } from 'url';
|
|
13
|
+
import { _dirname, _join, _resolve } from 'path';
|
|
14
|
+
import { createPathValidator } from './security/path-validator.mjs';
|
|
15
|
+
|
|
16
|
+
/**
 * Resolve a `file://` URI to a sanitized absolute file path.
 * @param {string} uri - The file URI (e.g., "file://hooks/compliance/largeTx.ask.rq")
 * @param {string} [basePath] - Base path used for relative resolution (defaults to cwd)
 * @returns {string} Sanitized absolute file path
 * @throws {TypeError} If uri is missing or not a string
 * @throws {Error} If the URI lacks the file:// scheme or fails security validation
 */
export function resolveFileUri(uri, basePath = process.cwd()) {
  const isNonEmptyString = typeof uri === 'string' && uri.length > 0;
  if (!isNonEmptyString) {
    throw new TypeError('resolveFileUri: uri must be a non-empty string');
  }

  if (!uri.startsWith('file://')) {
    throw new Error(`resolveFileUri: URI must start with 'file://', got: ${uri}`);
  }

  // Run the security validator (path traversal etc.) before touching the filesystem.
  const { valid, violations, sanitizedPath } = createPathValidator({ basePath }).validateFileUri(uri);
  if (!valid) {
    throw new Error(`Security validation failed: ${violations.join(', ')}`);
  }

  // The validator's sanitized path is the canonical result.
  return sanitizedPath;
}
|
|
44
|
+
|
|
45
|
+
/**
 * Calculate the SHA-256 hash of a file's content (read as UTF-8 text).
 * @param {string} filePath - Path to the file
 * @returns {Promise<string>} Hexadecimal SHA-256 digest
 * @throws {Error} If the file cannot be read
 */
export async function calculateFileHash(filePath) {
  try {
    const text = await readFile(filePath, 'utf-8');
    // Hash the decoded text exactly as read; chained form of create/update/digest.
    return createHash('sha256').update(text, 'utf-8').digest('hex');
  } catch (error) {
    throw new Error(`Failed to calculate hash for ${filePath}: ${error.message}`);
  }
}
|
|
62
|
+
|
|
63
|
+
/**
 * Load file content with SHA-256 hash verification.
 *
 * The file is read exactly once and the hash is computed from the bytes that
 * were read, so the verified hash always corresponds to the returned content.
 * (The previous implementation re-read the file via calculateFileHash, which
 * doubled I/O and could race with a concurrent write between the two reads.)
 *
 * @param {string} uri - The file URI
 * @param {string} expectedHash - Expected SHA-256 hash (hex)
 * @param {string} [basePath] - Base path for resolution
 * @returns {Promise<{content: string, hash: string, path: string, uri: string}>} File content and metadata
 * @throws {TypeError} If expectedHash is missing or not a string
 * @throws {Error} If the file cannot be loaded or the hash doesn't match
 */
export async function loadFileWithHash(uri, expectedHash, basePath = process.cwd()) {
  if (!expectedHash || typeof expectedHash !== 'string') {
    throw new TypeError('loadFileWithHash: expectedHash must be a non-empty string');
  }

  const filePath = resolveFileUri(uri, basePath);

  try {
    // Single read: content and hash are derived from the same bytes.
    const content = await readFile(filePath, 'utf-8');
    const actualHash = createHash('sha256').update(content, 'utf-8').digest('hex');

    if (actualHash !== expectedHash) {
      throw new Error(
        `Hash verification failed for ${uri}\n` +
          `Expected: ${expectedHash}\n` +
          `Actual: ${actualHash}\n` +
          `File: ${filePath}`
      );
    }

    return {
      content,
      hash: actualHash,
      path: filePath,
      uri,
    };
  } catch (error) {
    // Translate a missing-file error into a message that includes both the URI
    // and the resolved path; everything else propagates unchanged.
    if (error.code === 'ENOENT') {
      throw new Error(`File not found: ${uri} (resolved to: ${filePath})`);
    }
    throw error;
  }
}
|
|
109
|
+
|
|
110
|
+
/**
 * Load and sanity-check a SPARQL query file.
 * @param {string} uri - The file URI
 * @param {string} expectedHash - Expected SHA-256 hash
 * @param {string} [basePath] - Base path for resolution
 * @returns {Promise<{sparql: string, hash: string, path: string, uri: string}>} Parsed SPARQL query
 * @throws {Error} If the file cannot be loaded, is empty, or contains no SPARQL keywords
 */
export async function loadSparqlFile(uri, expectedHash, basePath = process.cwd()) {
  const fileData = await loadFileWithHash(uri, expectedHash, basePath);

  const sparql = fileData.content.trim();
  if (!sparql) {
    throw new Error(`Empty SPARQL file: ${uri}`);
  }

  // Heuristic sanity check only — this is not a full SPARQL parse.
  const sparqlKeywords = ['SELECT', 'ASK', 'CONSTRUCT', 'DESCRIBE', 'INSERT', 'DELETE', 'PREFIX'];
  // Hoisted: previously sparql.toUpperCase() was recomputed for every keyword.
  const upper = sparql.toUpperCase();
  const hasKeyword = sparqlKeywords.some(keyword => upper.includes(keyword));

  if (!hasKeyword) {
    throw new Error(`Invalid SPARQL syntax in ${uri}: No recognized SPARQL keywords found`);
  }

  return {
    sparql,
    hash: fileData.hash,
    path: fileData.path,
    uri: fileData.uri,
  };
}
|
|
143
|
+
|
|
144
|
+
/**
 * Load and sanity-check a SHACL shapes file (Turtle syntax).
 * @param {string} uri - The file URI
 * @param {string} expectedHash - Expected SHA-256 hash
 * @param {string} [basePath] - Base path for resolution
 * @returns {Promise<{turtle: string, hash: string, path: string, uri: string}>} Parsed SHACL shapes
 * @throws {Error} If the file cannot be loaded, is empty, lacks the SHACL
 *   namespace, or contains no recognized SHACL terms
 */
export async function loadShaclFile(uri, expectedHash, basePath = process.cwd()) {
  const { content, hash, path, uri: resolvedUri } = await loadFileWithHash(uri, expectedHash, basePath);

  const turtle = content.trim();
  if (!turtle) {
    throw new Error(`Empty SHACL file: ${uri}`);
  }

  // Must declare the SHACL namespace to plausibly be a shapes file.
  if (!turtle.includes('http://www.w3.org/ns/shacl#')) {
    throw new Error(`Invalid SHACL file ${uri}: Missing SHACL namespace`);
  }

  // Heuristic check for at least one common SHACL term.
  const shaclTerms = ['sh:NodeShape', 'sh:PropertyShape', 'sh:targetClass', 'sh:path'];
  if (!shaclTerms.some(term => turtle.includes(term))) {
    throw new Error(`Invalid SHACL file ${uri}: No recognized SHACL terms found`);
  }

  return { turtle, hash, path, uri: resolvedUri };
}
|
|
182
|
+
|
|
183
|
+
/**
 * Create a file resolver with TTL-based caching.
 * @param {Object} [options] - Resolver options
 * @param {string} [options.basePath] - Base path for file resolution
 * @param {boolean} [options.enableCache] - Enable file content caching
 * @param {number} [options.cacheMaxAge] - Cache max age in milliseconds
 * @returns {Object} File resolver instance
 */
export function createFileResolver(options = {}) {
  const {
    basePath = process.cwd(),
    enableCache = true,
    cacheMaxAge = 300000, // 5 minutes
  } = options;

  /** @type {Map<string, {data: Object, timestamp: number}>} */
  const cache = new Map();

  /**
   * Shared cache wrapper: return a fresh cached value for `key`, or invoke
   * `loader`, cache its result, and return it. Expired entries are evicted.
   * (Previously this get/evict/set logic was duplicated in loadFile,
   * loadSparql, and loadShacl.)
   * @param {string} key - Cache key
   * @param {() => Promise<Object>} loader - Produces the value on a cache miss
   * @returns {Promise<Object>} Loaded (possibly cached) value
   */
  async function withCache(key, loader) {
    if (enableCache && cache.has(key)) {
      const cached = cache.get(key);
      if (Date.now() - cached.timestamp < cacheMaxAge) {
        return cached.data;
      }
      cache.delete(key);
    }

    const data = await loader();

    if (enableCache) {
      cache.set(key, { data, timestamp: Date.now() });
    }

    return data;
  }

  return {
    /**
     * Load a file with hash verification.
     * @param {string} uri - The file URI
     * @param {string} expectedHash - Expected SHA-256 hash
     * @returns {Promise<Object>} File content and metadata
     */
    async loadFile(uri, expectedHash) {
      return withCache(`${uri}:${expectedHash}`, () => loadFileWithHash(uri, expectedHash, basePath));
    },

    /**
     * Load a SPARQL file.
     * @param {string} uri - The file URI
     * @param {string} expectedHash - Expected SHA-256 hash
     * @returns {Promise<Object>} Parsed SPARQL query
     */
    async loadSparql(uri, expectedHash) {
      return withCache(`sparql:${uri}:${expectedHash}`, () => loadSparqlFile(uri, expectedHash, basePath));
    },

    /**
     * Load a SHACL file.
     * @param {string} uri - The file URI
     * @param {string} expectedHash - Expected SHA-256 hash
     * @returns {Promise<Object>} Parsed SHACL shapes
     */
    async loadShacl(uri, expectedHash) {
      return withCache(`shacl:${uri}:${expectedHash}`, () => loadShaclFile(uri, expectedHash, basePath));
    },

    /**
     * Clear the cache.
     */
    clearCache() {
      cache.clear();
    },

    /**
     * Pre-load a file at startup (compute hash once, avoid I/O in hot path).
     * @param {string} uri - The file URI
     * @returns {Promise<Object>} File content, computed hash, and metadata
     *
     * Reads the file once, hashes the bytes just read (the old version read
     * the file a second time inside calculateFileHash), and seeds the cache
     * under the exact `${uri}:${hash}` key that loadFile() consults — the old
     * `preloaded:${uri}` key was never read back, so preloading did not
     * actually warm the hot path.
     */
    async preload(uri) {
      try {
        const filePath = resolveFileUri(uri, basePath);
        const content = await readFile(filePath, 'utf-8');
        const hash = createHash('sha256').update(content, 'utf-8').digest('hex');

        const data = {
          content,
          hash,
          path: filePath,
          uri,
        };

        if (enableCache) {
          cache.set(`${uri}:${hash}`, {
            data,
            timestamp: Date.now(),
          });
        }

        return data;
      } catch (error) {
        throw new Error(`Failed to preload ${uri}: ${error.message}`);
      }
    },

    /**
     * Collect all file URIs referenced in hook conditions.
     * Looks at both `hook.condition.file` and each entry of `hook.conditions`.
     * @param {Array<Object>} hooks - Array of hook definitions
     * @returns {Set<string>} Set of unique file URIs (empty if input is not an array)
     */
    collectFileUris(hooks) {
      const uris = new Set();

      if (!Array.isArray(hooks)) {
        return uris;
      }

      for (const hook of hooks) {
        if (hook.condition && hook.condition.file) {
          uris.add(hook.condition.file);
        }
        if (hook.conditions && Array.isArray(hook.conditions)) {
          for (const cond of hook.conditions) {
            if (cond.file) {
              uris.add(cond.file);
            }
          }
        }
      }

      return uris;
    },

    /**
     * Get cache statistics (valid vs. expired entries under the current TTL).
     * @returns {Object} Cache statistics
     */
    getCacheStats() {
      const now = Date.now();
      let validEntries = 0;
      let expiredEntries = 0;

      for (const [_key, value] of cache.entries()) {
        if (now - value.timestamp < cacheMaxAge) {
          validEntries++;
        } else {
          expiredEntries++;
        }
      }

      return {
        totalEntries: cache.size,
        validEntries,
        expiredEntries,
        cacheMaxAge,
      };
    },
  };
}
|
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @file JIT Hook Chain Compiler for UNRDF Knowledge Hooks.
|
|
3
|
+
* @module hooks/hook-chain-compiler
|
|
4
|
+
*
|
|
5
|
+
* @description
|
|
6
|
+
* Compiles hook chains into optimized single functions to eliminate
|
|
7
|
+
* dispatch overhead (18μs → ~0μs per chain execution).
|
|
8
|
+
*
|
|
9
|
+
* Uses `new Function()` for JIT compilation with CSP fallback.
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
/**
 * Cache for compiled chain functions.
 * Key: chain signature (hook names joined by '|')
 * Value: compiled function
 *
 * Module-global mutable state: shared by compileHookChain and
 * compileValidationOnlyChain, cleared via clearCompiledChainCache.
 * @type {Map<string, Function>}
 */
const compiledChains = new Map();

/**
 * Check if JIT compilation is available (CSP may block it).
 * Flipped to false below if the probe throws, and again at runtime if a
 * later compilation attempt fails.
 * @type {boolean}
 */
let jitAvailable = true;

// Test JIT availability once at module load
// NOTE(review): under a CSP without 'unsafe-eval' (or similar runtime
// restriction) `new Function` is expected to throw here — confirm in target
// deployment environments.
try {
  new Function('return true')();
} catch {
  jitAvailable = false;
}
|
|
32
|
+
|
|
33
|
+
/**
 * Determine whether a hook exposes a validation function.
 * @param {object} hook - Hook to check
 * @returns {boolean} - True if hook.validate is a function
 */
function hasValidation(hook) {
  const { validate } = hook;
  return typeof validate === 'function';
}
|
|
41
|
+
|
|
42
|
+
/**
 * Determine whether a hook exposes a transformation function.
 * @param {object} hook - Hook to check
 * @returns {boolean} - True if hook.transform is a function
 */
function hasTransformation(hook) {
  const { transform } = hook;
  return typeof transform === 'function';
}
|
|
50
|
+
|
|
51
|
+
/**
 * Generate a unique cache key for a hook chain by joining hook names with '|'.
 * @param {Array<object>} hooks - Array of validated hooks
 * @returns {string} - Cache key
 */
export function getChainKey(hooks) {
  const names = [];
  for (const hook of hooks) {
    names.push(hook.name);
  }
  return names.join('|');
}
|
|
60
|
+
|
|
61
|
+
/**
 * Compile a hook chain into an optimized function.
 *
 * The compiled function:
 * - Eliminates loop dispatch overhead
 * - Inlines validation/transformation calls
 * - Returns { valid: boolean, quad: Quad } directly
 *
 * Results are memoized in `compiledChains` keyed by the chain's name
 * signature. NOTE(review): the key is names-only, so two chains with the
 * same hook names but different validate/transform shapes would collide —
 * confirm hook names are unique per shape.
 *
 * Falls back to an interpreted closure when `new Function` is unavailable
 * (e.g. blocked by CSP) or throws during compilation.
 *
 * @param {Array<object>} hooks - Array of validated hooks
 * @returns {Function} - Compiled chain function (hooks, quad) => result
 *
 * @example
 * const compiledFn = compileHookChain([validator, transformer]);
 * const result = compiledFn(hooks, quad);
 */
export function compileHookChain(hooks) {
  const chainKey = getChainKey(hooks);

  // Memoized result wins.
  const memoized = compiledChains.get(chainKey);
  if (memoized) {
    return memoized;
  }

  // Interpreted fallback when JIT is unavailable.
  if (!jitAvailable) {
    const fallback = createInterpretedChain(hooks);
    compiledChains.set(chainKey, fallback);
    return fallback;
  }

  // Build the function body: all validation checks first, then transforms,
  // then the success result — mirroring the interpreted order.
  const statements = [];
  hooks.forEach((hook, i) => {
    if (hasValidation(hook)) {
      statements.push(
        `if (!hooks[${i}].validate(quad)) return { valid: false, quad, failedHook: hooks[${i}].name };`
      );
    }
  });
  hooks.forEach((hook, i) => {
    if (hasTransformation(hook)) {
      statements.push(`quad = hooks[${i}].transform(quad);`);
    }
  });
  statements.push('return { valid: true, quad };');

  try {
    // Deliberate use of new Function for JIT; only index-based member
    // accesses are interpolated — no user-controlled strings enter the body.
    const compiledFn = new Function('hooks', 'quad', statements.join('\n    '));
    compiledChains.set(chainKey, compiledFn);
    return compiledFn;
  } catch {
    // Compilation failed at runtime: disable JIT globally and interpret.
    jitAvailable = false;
    const fallback = createInterpretedChain(hooks);
    compiledChains.set(chainKey, fallback);
    return fallback;
  }
}
|
|
126
|
+
|
|
127
|
+
/**
 * Create an interpreted (non-JIT) chain function.
 * Used as fallback when CSP blocks `new Function()`.
 *
 * The returned function ignores its first argument and iterates the hooks
 * captured at creation time, running each hook's validation (short-circuiting
 * with a failure result) and then its transformation.
 *
 * @param {Array<object>} hooks - Array of validated hooks
 * @returns {Function} - Interpreted chain function
 */
function createInterpretedChain(hooks) {
  // Closure capture: the chain always runs against these hooks, not the
  // `_hooks` argument (kept for signature parity with the compiled form).
  const chainHooks = hooks;

  return function interpretedChain(_hooks, quad) {
    let current = quad;

    for (const hook of chainHooks) {
      if (hasValidation(hook) && !hook.validate(current)) {
        return { valid: false, quad: current, failedHook: hook.name };
      }
      if (hasTransformation(hook)) {
        current = hook.transform(current);
      }
    }

    return { valid: true, quad: current };
  };
}
|
|
156
|
+
|
|
157
|
+
/**
 * Compile a validation-only chain (even faster, skips transforms).
 *
 * Memoized under a `validate:`-prefixed key so it never collides with the
 * full-chain cache entries. When JIT is unavailable the interpreted
 * `every`-based check is cached instead; a chain with no validators compiles
 * to a constant-true function.
 *
 * @param {Array<object>} hooks - Array of validated hooks
 * @returns {Function} - Compiled validation function (hooks, quad) => boolean
 */
export function compileValidationOnlyChain(hooks) {
  const chainKey = `validate:${getChainKey(hooks)}`;

  const memoized = compiledChains.get(chainKey);
  if (memoized) {
    return memoized;
  }

  const validators = hooks.filter(hasValidation);

  // Interpreted form, used both for the no-JIT path and as compile-failure fallback.
  const interpret = (_hooks, quad) => validators.every(h => h.validate(quad));

  if (!jitAvailable || validators.length === 0) {
    const fn = validators.length === 0 ? () => true : interpret;
    compiledChains.set(chainKey, fn);
    return fn;
  }

  // Inline all checks into a single short-circuiting && expression.
  const checks = validators.map((_, i) => `hooks[${i}].validate(quad)`).join(' && ');

  try {
    const compiledFn = new Function('hooks', 'quad', `return ${checks || 'true'};`);

    // Wrapper binds the filtered validator list; caller's hooks arg is ignored.
    const wrapper = (_, quad) => compiledFn(validators, quad);
    compiledChains.set(chainKey, wrapper);
    return wrapper;
  } catch {
    // Unlike compileHookChain, a failure here does not flip the global
    // jitAvailable flag — matching the original behavior.
    compiledChains.set(chainKey, interpret);
    return interpret;
  }
}
|
|
199
|
+
|
|
200
|
+
/**
 * Clear the compiled chain cache.
 * Useful for testing or when hooks are redefined.
 *
 * Drops every memoized compiled/interpreted function (both full-chain and
 * `validate:`-prefixed entries); subsequent compile calls rebuild lazily.
 * Does not reset the `jitAvailable` flag.
 */
export function clearCompiledChainCache() {
  compiledChains.clear();
}
|
|
207
|
+
|
|
208
|
+
/**
 * Get cache statistics: number of memoized chain functions and whether
 * JIT compilation (`new Function`) is currently usable.
 *
 * @returns {{size: number, jitAvailable: boolean}} - Cache stats
 */
export function getCompilerStats() {
  const stats = {
    size: compiledChains.size,
    jitAvailable,
  };
  return stats;
}
|
|
219
|
+
|
|
220
|
+
/**
 * Check if JIT compilation is available.
 *
 * Reflects the module-level flag: probed once at load and flipped to false
 * if a later `new Function` compilation throws.
 *
 * @returns {boolean} - True if JIT is available
 */
export function isJitAvailable() {
  return jitAvailable;
}
|
|
228
|
+
|
|
229
|
+
// Default export bundles the public API for consumers that prefer a single
// namespace object; each function is also available as a named export.
export default {
  compileHookChain,
  compileValidationOnlyChain,
  clearCompiledChainCache,
  getCompilerStats,
  isJitAvailable,
  getChainKey,
};
|