@unrdf/knowledge-engine 5.0.1 → 26.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +23 -17
- package/src/ai-enhanced-search.mjs +371 -0
- package/src/anomaly-detector.mjs +226 -0
- package/src/artifact-generator.mjs +252 -0
- package/src/browser.mjs +1 -1
- package/src/chatman/disruption-arithmetic.mjs +140 -0
- package/src/chatman/market-dynamics.mjs +140 -0
- package/src/chatman/organizational-dynamics.mjs +140 -0
- package/src/chatman/strategic-dynamics.mjs +140 -0
- package/src/chatman-config-loader.mjs +282 -0
- package/src/chatman-engine.mjs +435 -0
- package/src/chatman-operator.mjs +343 -0
- package/src/dark-field-detector.mjs +332 -0
- package/src/formation-theorems.mjs +345 -0
- package/src/index.mjs +20 -2
- package/src/knowledge-hook-manager.mjs +1 -1
- package/src/lockchain-writer-browser.mjs +2 -2
- package/src/observability.mjs +40 -4
- package/src/query-optimizer.mjs +1 -1
- package/src/resolution-layer.mjs +1 -1
- package/src/transaction.mjs +11 -9
- package/README.md +0 -84
- package/src/browser-shims.mjs +0 -343
- package/src/canonicalize.mjs +0 -414
- package/src/condition-cache.mjs +0 -109
- package/src/condition-evaluator.mjs +0 -722
- package/src/dark-matter-core.mjs +0 -742
- package/src/define-hook.mjs +0 -213
- package/src/effect-sandbox-browser.mjs +0 -283
- package/src/effect-sandbox-worker.mjs +0 -170
- package/src/effect-sandbox.mjs +0 -517
- package/src/engines/index.mjs +0 -11
- package/src/engines/rdf-engine.mjs +0 -299
- package/src/file-resolver.mjs +0 -387
- package/src/hook-executor-batching.mjs +0 -277
- package/src/hook-executor.mjs +0 -870
- package/src/hook-management.mjs +0 -150
- package/src/ken-parliment.mjs +0 -119
- package/src/ken.mjs +0 -149
- package/src/knowledge-engine/builtin-rules.mjs +0 -190
- package/src/knowledge-engine/inference-engine.mjs +0 -418
- package/src/knowledge-engine/knowledge-engine.mjs +0 -317
- package/src/knowledge-engine/pattern-dsl.mjs +0 -142
- package/src/knowledge-engine/pattern-matcher.mjs +0 -215
- package/src/knowledge-engine/rules.mjs +0 -184
- package/src/knowledge-engine.mjs +0 -319
- package/src/knowledge-hook-engine.mjs +0 -360
- package/src/knowledge-substrate-core.mjs +0 -927
- package/src/lite.mjs +0 -222
- package/src/lockchain-writer.mjs +0 -602
- package/src/monitoring/andon-signals.mjs +0 -775
- package/src/parse.mjs +0 -290
- package/src/performance-optimizer.mjs +0 -678
- package/src/policy-pack.mjs +0 -572
- package/src/query-cache.mjs +0 -116
- package/src/query.mjs +0 -306
- package/src/reason.mjs +0 -350
- package/src/schemas.mjs +0 -1063
- package/src/security/error-sanitizer.mjs +0 -257
- package/src/security/path-validator.mjs +0 -194
- package/src/security/sandbox-restrictions.mjs +0 -331
- package/src/security-validator.mjs +0 -389
- package/src/store-cache.mjs +0 -137
- package/src/telemetry.mjs +0 -167
- package/src/utils/adaptive-monitor.mjs +0 -746
- package/src/utils/circuit-breaker.mjs +0 -513
- package/src/utils/edge-case-handler.mjs +0 -503
- package/src/utils/memory-manager.mjs +0 -498
- package/src/utils/ring-buffer.mjs +0 -282
- package/src/validate.mjs +0 -319
- package/src/validators/index.mjs +0 -338
|
@@ -1,299 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @fileoverview Production-grade RDF engine for JavaScript.
|
|
3
|
-
* @version 2.0.0
|
|
4
|
-
* @license MIT
|
|
5
|
-
*/
|
|
6
|
-
|
|
7
|
-
import { Parser, Writer, UnrdfDataFactory as DataFactory } from '@unrdf/core/rdf/n3-justified-only';
|
|
8
|
-
import { createStore } from '@unrdf/oxigraph'; // TODO: Replace with Oxigraph Store
|
|
9
|
-
import { createStore as createOxigraphStore } from '@unrdf/oxigraph';
|
|
10
|
-
import rdf from 'rdf-ext';
|
|
11
|
-
import SHACLValidator from 'rdf-validate-shacl';
|
|
12
|
-
import rdfCanonize from 'rdf-canonize';
|
|
13
|
-
import eyereasoner from 'eyereasoner';
|
|
14
|
-
import _jsonld from 'jsonld';
|
|
15
|
-
|
|
16
|
-
const { namedNode, literal, quad, blankNode, defaultGraph } = DataFactory;
|
|
17
|
-
|
|
18
|
-
/**
 * A comprehensive, production-grade engine for RDF processing in JavaScript.
 * It unifies parsing, serialization, querying, validation, and reasoning.
 */
export class RdfEngine {
  /**
   * @param {object} [options] - Configuration options for the engine.
   * @param {string} [options.baseIRI] - The base IRI to use for parsing relative URIs.
   */
  constructor(options = {}) {
    this.baseIRI = options.baseIRI || 'http://example.org/';
    this.store = createStore();
  }

  // =================================================================
  // == Core Setup & Store Access
  // =================================================================

  /**
   * Returns the underlying store instance.
   * @returns {import('n3').Store}
   */
  getStore() {
    return this.store;
  }

  /**
   * Clears the internal store, removing all quads.
   */
  clearStore() {
    this.store.removeQuads(this.store.getQuads());
  }

  // =================================================================
  // == Term Creation
  // =================================================================

  /**
   * Creates a named node (IRI) term.
   * @param {string} value - The IRI string.
   * @returns {object} The named node term.
   */
  namedNode(value) {
    return namedNode(value);
  }

  /**
   * Creates a literal term.
   * @param {string} value - The lexical value.
   * @param {string|object} [langOrDt] - Language tag or datatype term.
   * @returns {object} The literal term.
   */
  literal(value, langOrDt) {
    return literal(value, langOrDt);
  }

  /**
   * Creates a blank node term.
   * @param {string} [value] - Optional blank node label.
   * @returns {object} The blank node term.
   */
  blankNode(value) {
    return blankNode(value);
  }

  /**
   * Creates a quad.
   * @param {object} s - Subject term.
   * @param {object} p - Predicate term.
   * @param {object} o - Object term.
   * @param {object} [g] - Graph term; defaults to the default graph.
   * @returns {object} The quad.
   */
  quad(s, p, o, g = defaultGraph()) {
    return quad(s, p, o, g);
  }

  // =================================================================
  // == Parsing & Serialization
  // =================================================================

  /**
   * Parses a Turtle string and adds the quads to the internal store.
   * @param {string} ttl - The Turtle string to parse.
   * @returns {import('n3').Store} The engine's store instance.
   */
  parseTurtle(ttl) {
    const quads = new Parser({ baseIRI: this.baseIRI }).parse(ttl);
    this.store.addQuads(quads);
    return this.store;
  }

  /**
   * Serializes a store to a Turtle string.
   * @param {import('n3').Store} [store=this.store] - The store to serialize.
   * @param {object} [options] - N3.js Writer options.
   * @returns {string} The Turtle serialization.
   */
  serializeTurtle(store = this.store, options = {}) {
    const writer = new Writer({ ...options, format: 'Turtle' });
    return writer.quadsToString(store.getQuads());
  }

  /**
   * Serializes a store to an N-Quads string.
   * @param {import('n3').Store} [store=this.store] - The store to serialize.
   * @returns {string} The N-Quads serialization.
   */
  serializeNQuads(store = this.store) {
    const writer = new Writer({ format: 'N-Quads' });
    return writer.quadsToString(store.getQuads());
  }

  // =================================================================
  // == SPARQL Querying
  // =================================================================

  /**
   * Executes a read-only SPARQL query (SELECT, ASK, CONSTRUCT, DESCRIBE) against the store.
   * @param {string} sparql - The SPARQL query string.
   * @returns {Promise<{type: string, rows?: Array<object>, variables?: string[], boolean?: boolean, store?: object}>}
   *   A result object whose shape depends on the query form:
   *   SELECT -> {type:'select', rows, variables}; ASK -> {type:'ask', boolean};
   *   CONSTRUCT/DESCRIBE -> {type, store}.
   * @throws {Error} If the query is a write operation or an unrecognized form.
   */
  async query(sparql) {
    // Strip leading PREFIX declarations so the first token is the query form.
    const queryWithoutPrefixes = sparql.replace(/^PREFIX\s+[^\s]+\s+<[^>]+>\s*/gm, '').trim();
    const queryType = queryWithoutPrefixes.toUpperCase().split(/\s+/)[0];

    // Copy the quads into an Oxigraph store and execute the query synchronously.
    const oxigraphStore = createOxigraphStore(Array.from(this.store.getQuads()));
    const result = oxigraphStore.query(sparql);

    switch (queryType) {
      case 'SELECT': {
        // Oxigraph returns an array of binding objects for SELECT.
        const rows = Array.isArray(result)
          ? result.map(item => {
              const entry = {};
              if (item && typeof item === 'object') {
                for (const [key, val] of Object.entries(item)) {
                  // Unwrap RDF terms to their lexical value. `?? val` (not a
                  // truthiness check) keeps empty-string literals as ''.
                  entry[key] = val?.value ?? val;
                }
              }
              return entry;
            })
          : [];
        // Collect the union of bound variables across ALL rows: with OPTIONAL
        // patterns the first row may not bind every projected variable.
        const variableSet = new Set();
        for (const row of rows) {
          for (const key of Object.keys(row)) {
            variableSet.add(key);
          }
        }
        return { type: 'select', rows, variables: [...variableSet] };
      }
      case 'ASK': {
        // Oxigraph returns a boolean for ASK.
        const boolean = typeof result === 'boolean' ? result : false;
        return { type: 'ask', boolean };
      }
      case 'CONSTRUCT': {
        // Oxigraph returns an array of quads for CONSTRUCT.
        const quads = Array.isArray(result) ? result : [];
        return {
          type: 'construct',
          store: createStore(quads),
        };
      }
      case 'DESCRIBE': {
        // Oxigraph returns an array of quads for DESCRIBE.
        const quads = Array.isArray(result) ? result : [];
        return {
          type: 'describe',
          store: createStore(quads),
        };
      }
      case 'INSERT':
      case 'UPDATE':
      case 'DELETE':
        throw new Error(
          `Query type "${queryType}" is not supported. Use the update() helper for writes.`
        );
      default:
        throw new Error(`Query type "${queryType}" is not supported by query().`);
    }
  }

  /**
   * Executes a SPARQL UPDATE operation against the store.
   * Only INSERT DATA is currently implemented.
   * @param {string} sparql - The SPARQL UPDATE query string.
   * @returns {Promise<{type: string, ok: boolean, inserted: number}>} The update result.
   * @throws {Error} For any operation other than INSERT DATA.
   */
  async update(sparql) {
    // Strip PREFIX declarations in one pass (regex, then a line filter for
    // any stragglers) instead of recursing, so the leading keyword
    // identifies the operation.
    const queryWithoutPrefixes = sparql
      .replace(/PREFIX\s+[^\s]+\s+<[^>]+>\s*/g, '')
      .split('\n')
      .filter(line => !line.trim().startsWith('PREFIX'))
      .join('\n')
      .trim();
    const queryType = queryWithoutPrefixes.toUpperCase().split(/\s+/)[0];

    if (queryType === 'INSERT' && queryWithoutPrefixes.includes('INSERT DATA')) {
      // NOTE: `[^}]+` cannot handle `}` inside literals or nested groups;
      // sufficient for the simple payloads this helper targets.
      const insertMatch = sparql.match(/INSERT\s+DATA\s*\{([^}]+)\}/is);
      if (insertMatch) {
        const turtleData = insertMatch[1].trim();

        // Re-attach the original PREFIX declarations so the payload parses
        // as standalone Turtle.
        const prefixMatches = sparql.match(/PREFIX\s+[^\s]+\s+<[^>]+>/gi) || [];
        const prefixes = prefixMatches.join('\n');
        const dataToParse = prefixes + (prefixes ? '\n' : '') + turtleData;

        const parser = new Parser();
        const quads = parser.parse(dataToParse);
        // `q`, not `quad`: avoid shadowing the imported quad() factory.
        for (const q of quads) {
          this.store.add(q);
        }
        return { type: 'update', ok: true, inserted: quads.length };
      }
    }

    // Any other UPDATE form is not yet supported.
    throw new Error(`UPDATE operation "${queryType}" not yet implemented`);
  }

  // =================================================================
  // == SHACL Validation
  // =================================================================

  /**
   * Validates a data store against a set of SHACL shapes.
   * @param {import('n3').Store} dataStore - The store containing data to validate.
   * @param {import('n3').Store|string} shapes - The store or Turtle string containing SHACL shapes.
   * @returns {{conforms: boolean, results: Array<object>}} A validation report.
   */
  validateShacl(dataStore, shapes) {
    const shapesStore =
      typeof shapes === 'string' ? createStore(new Parser().parse(shapes)) : shapes;

    const validator = new SHACLValidator(rdf.dataset([...shapesStore]));
    const report = validator.validate(rdf.dataset([...dataStore]));

    return {
      conforms: report.conforms,
      results: (report.results || []).map(r => ({
        message: r.message?.[0]?.value || null,
        path: r.path?.value || null,
        focusNode: r.focusNode?.value || null,
      })),
    };
  }

  // =================================================================
  // == Reasoning
  // =================================================================

  /**
   * Infers new knowledge by applying N3 rules to a data store.
   * @param {import('n3').Store} dataStore - The store containing data.
   * @param {import('n3').Store|string} rules - The store or Turtle string containing N3 rules.
   * @returns {Promise<import('n3').Store>} A new store containing both original and inferred quads.
   */
  async reason(dataStore, rules) {
    const rulesN3 = typeof rules === 'string' ? rules : this.serializeTurtle(rules);
    const dataN3 = this.serializeTurtle(dataStore);
    const { executeBasicEyeQuery } = await import('eyereasoner');
    const inferredN3 = await executeBasicEyeQuery(eyereasoner.SWIPL, dataN3, rulesN3);
    return createStore(new Parser().parse(inferredN3));
  }

  // =================================================================
  // == Canonicalization & Isomorphism
  // =================================================================

  /**
   * Produces a canonical representation of a store's quads using URDNA2015.
   * @param {import('n3').Store} store - The store to canonicalize.
   * @returns {string} The canonical N-Quads string.
   */
  canonicalize(store) {
    return rdfCanonize.canonizeSync(store.getQuads(), {
      algorithm: 'URDNA2015',
    });
  }

  /**
   * Checks if two stores are logically equivalent (isomorphic).
   * @param {import('n3').Store} storeA
   * @param {import('n3').Store} storeB
   * @returns {boolean}
   */
  isIsomorphic(storeA, storeB) {
    // Cheap size check first; canonical N-Quads comparison decides equality.
    if (storeA.size !== storeB.size) return false;
    return this.canonicalize(storeA) === this.canonicalize(storeB);
  }
}
|
package/src/file-resolver.mjs
DELETED
|
@@ -1,387 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @file File URI resolver with content-addressed verification.
|
|
3
|
-
* @module file-resolver
|
|
4
|
-
*
|
|
5
|
-
* @description
|
|
6
|
-
* Production-ready file resolver that loads SPARQL/SHACL files from URIs
|
|
7
|
-
* with SHA-256 hash verification for content integrity and provenance.
|
|
8
|
-
*/
|
|
9
|
-
|
|
10
|
-
import { readFile } from 'fs/promises';
|
|
11
|
-
import { createHash } from 'crypto';
|
|
12
|
-
import { _fileURLToPath } from 'url';
|
|
13
|
-
import { _dirname, _join, _resolve } from 'path';
|
|
14
|
-
import { createPathValidator } from './security/path-validator.mjs';
|
|
15
|
-
|
|
16
|
-
/**
 * Resolve a file URI to an absolute path.
 * @param {string} uri - The file URI (e.g., "file://hooks/compliance/largeTx.ask.rq")
 * @param {string} [basePath] - Base path for relative resolution
 * @returns {string} Absolute file path
 *
 * @throws {TypeError} If uri is not a non-empty string
 * @throws {Error} If the URI scheme is wrong or security validation fails
 */
export function resolveFileUri(uri, basePath = process.cwd()) {
  // Reject anything that is not a non-empty string up front.
  if (typeof uri !== 'string' || uri.length === 0) {
    throw new TypeError('resolveFileUri: uri must be a non-empty string');
  }

  // Only the file:// scheme is accepted.
  if (!uri.startsWith('file://')) {
    throw new Error(`resolveFileUri: URI must start with 'file://', got: ${uri}`);
  }

  // Delegate traversal/injection checks to the shared path validator and
  // surface every violation it reports.
  const validation = createPathValidator({ basePath }).validateFileUri(uri);
  if (!validation.valid) {
    throw new Error(`Security validation failed: ${validation.violations.join(', ')}`);
  }

  // The validator returns a sanitized absolute path; use it verbatim.
  return validation.sanitizedPath;
}
|
|
44
|
-
|
|
45
|
-
/**
 * Calculate SHA-256 hash of file content.
 * @param {string} filePath - Path to the file
 * @returns {Promise<string>} Hexadecimal SHA-256 hash
 *
 * @throws {Error} If file cannot be read
 */
export async function calculateFileHash(filePath) {
  try {
    // Read as UTF-8 text and hash the decoded content.
    const text = await readFile(filePath, 'utf-8');
    return createHash('sha256').update(text, 'utf-8').digest('hex');
  } catch (error) {
    // Wrap any I/O failure with the path for easier diagnosis.
    throw new Error(`Failed to calculate hash for ${filePath}: ${error.message}`);
  }
}
|
|
62
|
-
|
|
63
|
-
/**
 * Load file content with hash verification.
 * @param {string} uri - The file URI
 * @param {string} expectedHash - Expected SHA-256 hash
 * @param {string} [basePath] - Base path for resolution
 * @returns {Promise<{content: string, hash: string, path: string, uri: string}>} File content and metadata
 *
 * @throws {TypeError} If expectedHash is not a non-empty string
 * @throws {Error} If file cannot be loaded or hash doesn't match
 */
export async function loadFileWithHash(uri, expectedHash, basePath = process.cwd()) {
  if (!expectedHash || typeof expectedHash !== 'string') {
    throw new TypeError('loadFileWithHash: expectedHash must be a non-empty string');
  }

  const filePath = resolveFileUri(uri, basePath);

  try {
    // Load file content once.
    const content = await readFile(filePath, 'utf-8');

    // Hash the content we just read (previously the file was read a second
    // time via calculateFileHash — redundant I/O plus a TOCTOU race where
    // the returned content could differ from the verified bytes).
    const actualHash = createHash('sha256').update(content, 'utf-8').digest('hex');

    // Verify hash matches.
    if (actualHash !== expectedHash) {
      throw new Error(
        `Hash verification failed for ${uri}\n` +
        `Expected: ${expectedHash}\n` +
        `Actual: ${actualHash}\n` +
        `File: ${filePath}`
      );
    }

    return {
      content,
      hash: actualHash,
      path: filePath,
      uri,
    };
  } catch (error) {
    if (error.code === 'ENOENT') {
      throw new Error(`File not found: ${uri} (resolved to: ${filePath})`);
    }
    throw error;
  }
}
|
|
109
|
-
|
|
110
|
-
/**
 * Load and parse a SPARQL query file.
 * @param {string} uri - The file URI
 * @param {string} expectedHash - Expected SHA-256 hash
 * @param {string} [basePath] - Base path for resolution
 * @returns {Promise<{sparql: string, hash: string, path: string, uri: string}>} Parsed SPARQL query
 *
 * @throws {Error} If file cannot be loaded or is not valid SPARQL
 */
export async function loadSparqlFile(uri, expectedHash, basePath = process.cwd()) {
  const fileData = await loadFileWithHash(uri, expectedHash, basePath);

  // Trim surrounding whitespace and reject empty files.
  const sparql = fileData.content.trim();
  if (!sparql) {
    throw new Error(`Empty SPARQL file: ${uri}`);
  }

  // Sanity check: the text must contain at least one SPARQL keyword.
  const keywords = ['SELECT', 'ASK', 'CONSTRUCT', 'DESCRIBE', 'INSERT', 'DELETE', 'PREFIX'];
  const upper = sparql.toUpperCase();
  if (!keywords.some(keyword => upper.includes(keyword))) {
    throw new Error(`Invalid SPARQL syntax in ${uri}: No recognized SPARQL keywords found`);
  }

  return {
    sparql,
    hash: fileData.hash,
    path: fileData.path,
    uri: fileData.uri,
  };
}
|
|
143
|
-
|
|
144
|
-
/**
 * Load and parse a SHACL shapes file.
 * @param {string} uri - The file URI
 * @param {string} expectedHash - Expected SHA-256 hash
 * @param {string} [basePath] - Base path for resolution
 * @returns {Promise<{turtle: string, hash: string, path: string, uri: string}>} Parsed SHACL shapes
 *
 * @throws {Error} If file cannot be loaded or is not valid Turtle/SHACL
 */
export async function loadShaclFile(uri, expectedHash, basePath = process.cwd()) {
  const fileData = await loadFileWithHash(uri, expectedHash, basePath);

  // Trim surrounding whitespace and reject empty files.
  const turtle = fileData.content.trim();
  if (turtle === '') {
    throw new Error(`Empty SHACL file: ${uri}`);
  }

  // The SHACL namespace must appear somewhere in the document.
  if (!turtle.includes('http://www.w3.org/ns/shacl#')) {
    throw new Error(`Invalid SHACL file ${uri}: Missing SHACL namespace`);
  }

  // Sanity check: at least one well-known SHACL term must be present.
  const indicators = ['sh:NodeShape', 'sh:PropertyShape', 'sh:targetClass', 'sh:path'];
  if (!indicators.some(term => turtle.includes(term))) {
    throw new Error(`Invalid SHACL file ${uri}: No recognized SHACL terms found`);
  }

  return {
    turtle,
    hash: fileData.hash,
    path: fileData.path,
    uri: fileData.uri,
  };
}
|
|
182
|
-
|
|
183
|
-
/**
 * Create a file resolver with caching.
 * @param {Object} [options] - Resolver options
 * @param {string} [options.basePath] - Base path for file resolution
 * @param {boolean} [options.enableCache] - Enable file content caching
 * @param {number} [options.cacheMaxAge] - Cache max age in milliseconds
 * @returns {Object} File resolver instance
 */
export function createFileResolver(options = {}) {
  const {
    basePath = process.cwd(),
    enableCache = true,
    cacheMaxAge = 300000, // 5 minutes
  } = options;

  /** @type {Map<string, {data: Object, timestamp: number}>} */
  const cache = new Map();

  /**
   * Return a fresh cached value for `key`, or run `loader`, cache its result,
   * and return it. Expired entries are evicted on access. (Previously this
   * get/evict/set logic was triplicated across loadFile/loadSparql/loadShacl.)
   * @param {string} key - Cache key
   * @param {() => Promise<Object>} loader - Produces the value on miss
   * @returns {Promise<Object>}
   */
  async function getOrLoad(key, loader) {
    if (enableCache && cache.has(key)) {
      const cached = cache.get(key);
      if (Date.now() - cached.timestamp < cacheMaxAge) {
        return cached.data;
      }
      cache.delete(key);
    }

    const data = await loader();

    if (enableCache) {
      cache.set(key, { data, timestamp: Date.now() });
    }

    return data;
  }

  return {
    /**
     * Load a file with hash verification.
     * @param {string} uri - The file URI
     * @param {string} expectedHash - Expected SHA-256 hash
     * @returns {Promise<Object>} File content and metadata
     */
    async loadFile(uri, expectedHash) {
      // Serve a preloaded entry when its content hash matches the expected
      // hash — this makes preload() actually remove I/O from the hot path
      // (previously preloaded entries were written but never read back).
      if (enableCache) {
        const preloaded = cache.get(`preloaded:${uri}`);
        if (preloaded && preloaded.data.hash === expectedHash) {
          return preloaded.data;
        }
      }

      return getOrLoad(`${uri}:${expectedHash}`, () =>
        loadFileWithHash(uri, expectedHash, basePath)
      );
    },

    /**
     * Load a SPARQL file.
     * @param {string} uri - The file URI
     * @param {string} expectedHash - Expected SHA-256 hash
     * @returns {Promise<Object>} Parsed SPARQL query
     */
    async loadSparql(uri, expectedHash) {
      return getOrLoad(`sparql:${uri}:${expectedHash}`, () =>
        loadSparqlFile(uri, expectedHash, basePath)
      );
    },

    /**
     * Load a SHACL file.
     * @param {string} uri - The file URI
     * @param {string} expectedHash - Expected SHA-256 hash
     * @returns {Promise<Object>} Parsed SHACL shapes
     */
    async loadShacl(uri, expectedHash) {
      return getOrLoad(`shacl:${uri}:${expectedHash}`, () =>
        loadShaclFile(uri, expectedHash, basePath)
      );
    },

    /**
     * Clear the cache.
     */
    clearCache() {
      cache.clear();
    },

    /**
     * Pre-load a file at startup (compute hash once, avoid I/O in hot path).
     * @param {string} uri - The file URI
     * @returns {Promise<Object>} File content, computed hash, and metadata
     *
     * Loads the file and computes its SHA-256 hash once, then caches the
     * result under `preloaded:<uri>`; loadFile() serves that entry when its
     * hash matches the caller's expected hash.
     */
    async preload(uri) {
      try {
        const filePath = resolveFileUri(uri, basePath);
        const content = await readFile(filePath, 'utf-8');
        const hash = await calculateFileHash(filePath);

        const data = {
          content,
          hash,
          path: filePath,
          uri,
        };

        if (enableCache) {
          cache.set(`preloaded:${uri}`, { data, timestamp: Date.now() });
        }

        return data;
      } catch (error) {
        throw new Error(`Failed to preload ${uri}: ${error.message}`);
      }
    },

    /**
     * Collect all file URIs referenced in hook conditions.
     * @param {Array<Object>} hooks - Array of hook definitions
     * @returns {Set<string>} Set of unique file URIs
     *
     * Scans both the singular `condition.file` form and the plural
     * `conditions[].file` form; non-array input yields an empty set.
     */
    collectFileUris(hooks) {
      const uris = new Set();

      if (!Array.isArray(hooks)) {
        return uris;
      }

      for (const hook of hooks) {
        if (hook.condition?.file) {
          uris.add(hook.condition.file);
        }
        if (Array.isArray(hook.conditions)) {
          for (const cond of hook.conditions) {
            if (cond.file) {
              uris.add(cond.file);
            }
          }
        }
      }

      return uris;
    },

    /**
     * Get cache statistics.
     * @returns {Object} Cache statistics (total/valid/expired entry counts plus cacheMaxAge)
     */
    getCacheStats() {
      const now = Date.now();
      let validEntries = 0;
      let expiredEntries = 0;

      for (const { timestamp } of cache.values()) {
        if (now - timestamp < cacheMaxAge) {
          validEntries++;
        } else {
          expiredEntries++;
        }
      }

      return {
        totalEntries: cache.size,
        validEntries,
        expiredEntries,
        cacheMaxAge,
      };
    },
  };
}
|