@unrdf/knowledge-engine 5.0.1 → 26.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +13 -7
- package/src/ai-enhanced-search.mjs +371 -0
- package/src/anomaly-detector.mjs +226 -0
- package/src/artifact-generator.mjs +251 -0
- package/src/browser.mjs +1 -1
- package/src/chatman/disruption-arithmetic.mjs +140 -0
- package/src/chatman/market-dynamics.mjs +140 -0
- package/src/chatman/organizational-dynamics.mjs +140 -0
- package/src/chatman/strategic-dynamics.mjs +140 -0
- package/src/chatman-config-loader.mjs +282 -0
- package/src/chatman-engine.mjs +431 -0
- package/src/chatman-operator.mjs +342 -0
- package/src/dark-field-detector.mjs +312 -0
- package/src/formation-theorems.mjs +345 -0
- package/src/index.mjs +20 -2
- package/src/knowledge-hook-manager.mjs +1 -1
- package/src/lockchain-writer-browser.mjs +2 -2
- package/src/observability.mjs +40 -4
- package/src/query-optimizer.mjs +1 -1
- package/src/resolution-layer.mjs +1 -1
- package/src/transaction.mjs +11 -9
- package/README.md +0 -84
- package/src/browser-shims.mjs +0 -343
- package/src/canonicalize.mjs +0 -414
- package/src/condition-cache.mjs +0 -109
- package/src/condition-evaluator.mjs +0 -722
- package/src/dark-matter-core.mjs +0 -742
- package/src/define-hook.mjs +0 -213
- package/src/effect-sandbox-browser.mjs +0 -283
- package/src/effect-sandbox-worker.mjs +0 -170
- package/src/effect-sandbox.mjs +0 -517
- package/src/engines/index.mjs +0 -11
- package/src/engines/rdf-engine.mjs +0 -299
- package/src/file-resolver.mjs +0 -387
- package/src/hook-executor-batching.mjs +0 -277
- package/src/hook-executor.mjs +0 -870
- package/src/hook-management.mjs +0 -150
- package/src/ken-parliment.mjs +0 -119
- package/src/ken.mjs +0 -149
- package/src/knowledge-engine/builtin-rules.mjs +0 -190
- package/src/knowledge-engine/inference-engine.mjs +0 -418
- package/src/knowledge-engine/knowledge-engine.mjs +0 -317
- package/src/knowledge-engine/pattern-dsl.mjs +0 -142
- package/src/knowledge-engine/pattern-matcher.mjs +0 -215
- package/src/knowledge-engine/rules.mjs +0 -184
- package/src/knowledge-engine.mjs +0 -319
- package/src/knowledge-hook-engine.mjs +0 -360
- package/src/knowledge-substrate-core.mjs +0 -927
- package/src/lite.mjs +0 -222
- package/src/lockchain-writer.mjs +0 -602
- package/src/monitoring/andon-signals.mjs +0 -775
- package/src/parse.mjs +0 -290
- package/src/performance-optimizer.mjs +0 -678
- package/src/policy-pack.mjs +0 -572
- package/src/query-cache.mjs +0 -116
- package/src/query.mjs +0 -306
- package/src/reason.mjs +0 -350
- package/src/schemas.mjs +0 -1063
- package/src/security/error-sanitizer.mjs +0 -257
- package/src/security/path-validator.mjs +0 -194
- package/src/security/sandbox-restrictions.mjs +0 -331
- package/src/security-validator.mjs +0 -389
- package/src/store-cache.mjs +0 -137
- package/src/telemetry.mjs +0 -167
- package/src/utils/adaptive-monitor.mjs +0 -746
- package/src/utils/circuit-breaker.mjs +0 -513
- package/src/utils/edge-case-handler.mjs +0 -503
- package/src/utils/memory-manager.mjs +0 -498
- package/src/utils/ring-buffer.mjs +0 -282
- package/src/validate.mjs +0 -319
- package/src/validators/index.mjs +0 -338
|
@@ -1,722 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @file Condition evaluation engine for knowledge hooks.
|
|
3
|
-
* @module condition-evaluator
|
|
4
|
-
*
|
|
5
|
-
* @description
|
|
6
|
-
* Production-ready condition evaluator that loads and executes SPARQL queries
|
|
7
|
-
* and SHACL validations to determine if hooks should trigger.
|
|
8
|
-
*/
|
|
9
|
-
|
|
10
|
-
import { createFileResolver } from './file-resolver.mjs';
|
|
11
|
-
import { ask, select } from './query.mjs';
|
|
12
|
-
import { validateShacl } from './validate.mjs';
|
|
13
|
-
import { createQueryOptimizer } from './query-optimizer.mjs';
|
|
14
|
-
import { createStore } from '@unrdf/oxigraph';
|
|
15
|
-
|
|
16
|
-
/**
|
|
17
|
-
* Evaluate a hook condition against a graph.
|
|
18
|
-
* @param {Object} condition - The hook condition definition
|
|
19
|
-
* @param {Store} graph - The RDF graph to evaluate against
|
|
20
|
-
* @param {Object} [options] - Evaluation options
|
|
21
|
-
* @param {string} [options.basePath] - Base path for file resolution
|
|
22
|
-
* @param {Object} [options.env] - Environment variables
|
|
23
|
-
* @returns {Promise<boolean|Array|Object>} Condition evaluation result
|
|
24
|
-
*
|
|
25
|
-
* @throws {Error} If condition evaluation fails
|
|
26
|
-
*/
|
|
27
|
-
/**
 * Evaluate a hook condition against a graph.
 *
 * Validates its inputs, then dispatches on `condition.kind` to the matching
 * kind-specific evaluator. Any evaluator failure is wrapped in a new Error
 * that keeps the original as `cause`, so callers retain the full stack.
 *
 * @param {Object} condition - The hook condition definition (must carry `kind`)
 * @param {Store} graph - The RDF graph to evaluate against (must expose getQuads)
 * @param {Object} [options] - Evaluation options
 * @param {string} [options.basePath] - Base path for file resolution
 * @param {Object} [options.env] - Environment variables passed to evaluators
 * @returns {Promise<boolean|Array|Object>} Kind-dependent evaluation result
 * @throws {TypeError} If condition or graph is invalid
 * @throws {Error} If the kind is unsupported or evaluation fails
 */
export async function evaluateCondition(condition, graph, options = {}) {
  if (!condition || typeof condition !== 'object') {
    throw new TypeError('evaluateCondition: condition must be an object');
  }
  if (!graph || typeof graph.getQuads !== 'function') {
    throw new TypeError('evaluateCondition: graph must be a valid Store instance');
  }

  const { basePath = process.cwd(), env = {} } = options;
  const resolver = createFileResolver({ basePath });

  try {
    switch (condition.kind) {
      case 'sparql-ask':
        return await evaluateSparqlAsk(condition, graph, resolver, env);
      case 'sparql-select':
        return await evaluateSparqlSelect(condition, graph, resolver, env);
      case 'shacl':
        return await evaluateShacl(condition, graph, resolver, env);
      case 'delta':
        return await evaluateDelta(condition, graph, resolver, env, options);
      case 'threshold':
        return await evaluateThreshold(condition, graph, resolver, env, options);
      case 'count':
        return await evaluateCount(condition, graph, resolver, env, options);
      case 'window':
        return await evaluateWindow(condition, graph, resolver, env, options);
      default:
        throw new Error(`Unsupported condition kind: ${condition.kind}`);
    }
  } catch (error) {
    // Preserve the original failure for debugging instead of discarding its stack.
    throw new Error(`Condition evaluation failed: ${error.message}`, { cause: error });
  }
}
|
|
61
|
-
|
|
62
|
-
/**
|
|
63
|
-
* Evaluate a SPARQL ASK query condition.
|
|
64
|
-
* @param {Object} condition - The condition definition
|
|
65
|
-
* @param {Store} graph - The RDF graph
|
|
66
|
-
* @param {Object} resolver - File resolver instance
|
|
67
|
-
* @param {Object} env - Environment variables
|
|
68
|
-
* @returns {Promise<boolean>} ASK query result
|
|
69
|
-
*/
|
|
70
|
-
/**
 * Evaluate a SPARQL ASK query condition.
 * The query text comes either from a content-addressed file reference
 * (`condition.ref` with uri + sha256) or from an inline `condition.query`
 * string; it is then executed as a deterministic ASK against the graph.
 * @param {Object} condition - The condition definition
 * @param {Store} graph - The RDF graph
 * @param {Object} resolver - File resolver instance
 * @param {Object} env - Environment variables
 * @returns {Promise<boolean>} ASK query result
 */
async function evaluateSparqlAsk(condition, graph, resolver, env) {
  const { ref, query: inlineQuery } = condition;

  const hasFileRef = Boolean(ref && ref.uri && ref.sha256);
  if (!hasFileRef && !inlineQuery) {
    throw new Error(
      'SPARQL ASK condition requires either ref (file reference) or query (inline string)'
    );
  }

  // A complete file reference takes precedence over an inline query.
  const sparql = hasFileRef
    ? (await resolver.loadSparql(ref.uri, ref.sha256)).sparql
    : inlineQuery;

  return ask(graph, sparql, {
    env,
    deterministic: true,
  });
}
|
|
96
|
-
|
|
97
|
-
/**
|
|
98
|
-
* Evaluate a SPARQL SELECT query condition.
|
|
99
|
-
* @param {Object} condition - The condition definition
|
|
100
|
-
* @param {Store} graph - The RDF graph
|
|
101
|
-
* @param {Object} resolver - File resolver instance
|
|
102
|
-
* @param {Object} env - Environment variables
|
|
103
|
-
* @returns {Promise<Array>} SELECT query results
|
|
104
|
-
*/
|
|
105
|
-
/**
 * Evaluate a SPARQL SELECT query condition.
 * The query text comes either from a content-addressed file reference
 * (`condition.ref` with uri + sha256) or from an inline `condition.query`
 * string; it is then executed as a deterministic SELECT against the graph.
 * @param {Object} condition - The condition definition
 * @param {Store} graph - The RDF graph
 * @param {Object} resolver - File resolver instance
 * @param {Object} env - Environment variables
 * @returns {Promise<Array>} SELECT query result bindings
 */
async function evaluateSparqlSelect(condition, graph, resolver, env) {
  const { ref, query: inlineQuery } = condition;

  const hasFileRef = Boolean(ref && ref.uri && ref.sha256);
  if (!hasFileRef && !inlineQuery) {
    throw new Error(
      'SPARQL SELECT condition requires either ref (file reference) or query (inline string)'
    );
  }

  // A complete file reference takes precedence over an inline query.
  const sparql = hasFileRef
    ? (await resolver.loadSparql(ref.uri, ref.sha256)).sparql
    : inlineQuery;

  return select(graph, sparql, {
    env,
    deterministic: true,
  });
}
|
|
131
|
-
|
|
132
|
-
/**
|
|
133
|
-
* Evaluate a SHACL validation condition.
|
|
134
|
-
* @param {Object} condition - The condition definition
|
|
135
|
-
* @param {Store} graph - The RDF graph
|
|
136
|
-
* @param {Object} resolver - File resolver instance
|
|
137
|
-
* @param {Object} env - Environment variables
|
|
138
|
-
* @returns {Promise<Object>} SHACL validation result
|
|
139
|
-
*/
|
|
140
|
-
/**
 * Evaluate a SHACL validation condition.
 *
 * Shapes come either from a content-addressed file reference
 * (`condition.ref` with uri + sha256) or from inline Turtle in
 * `condition.shapes`. Inline shapes are accepted for consistency with
 * validateCondition(), which already treats `shapes` as valid inline content.
 *
 * @param {Object} condition - The condition definition
 * @param {Store} graph - The RDF graph
 * @param {Object} resolver - File resolver instance
 * @param {Object} env - Environment variables (env.strictMode toggles strict validation)
 * @returns {Promise<Object>} SHACL validation report
 * @throws {Error} If neither a complete ref nor inline shapes are provided
 */
async function evaluateShacl(condition, graph, resolver, env) {
  const { ref, shapes: inlineShapes } = condition;

  let turtle;
  if (ref && ref.uri && ref.sha256) {
    // Load SHACL shapes from the referenced file.
    ({ turtle } = await resolver.loadShacl(ref.uri, ref.sha256));
  } else if (inlineShapes) {
    // Use inline Turtle shapes directly.
    turtle = inlineShapes;
  } else {
    throw new Error('SHACL condition requires ref with uri and sha256, or inline shapes');
  }

  // Execute SHACL validation against the resolved shapes.
  return validateShacl(graph, turtle, {
    strict: env.strictMode || false,
    includeDetails: true,
  });
}
|
|
158
|
-
|
|
159
|
-
/**
|
|
160
|
-
* Create a condition evaluator with caching and error handling.
|
|
161
|
-
* @param {Object} [options] - Evaluator options
|
|
162
|
-
* @param {string} [options.basePath] - Base path for file resolution
|
|
163
|
-
* @param {boolean} [options.enableCache] - Enable condition result caching
|
|
164
|
-
* @param {number} [options.cacheMaxAge] - Cache max age in milliseconds
|
|
165
|
-
* @param {boolean} [options.strictMode] - Enable strict error handling
|
|
166
|
-
* @returns {Object} Condition evaluator instance
|
|
167
|
-
*/
|
|
168
|
-
/**
 * Create a condition evaluator with result caching and configurable error
 * handling.
 *
 * The returned object wraps evaluateCondition() with a time-bounded
 * in-memory result cache, batch evaluation, and boolean "is satisfied"
 * checks. When optimization is enabled, optimizer helpers are mixed in via
 * addOptimizerMethods().
 *
 * @param {Object} [options] - Evaluator options
 * @param {string} [options.basePath] - Base path for file resolution
 * @param {boolean} [options.enableCache] - Enable condition result caching
 * @param {number} [options.cacheMaxAge] - Cache max age in milliseconds
 * @param {boolean} [options.strictMode] - Rethrow evaluation errors instead of returning safe defaults
 * @param {boolean} [options.enableOptimization] - Attach query-optimizer helpers
 * @param {Object} [options.optimizationConfig] - Config forwarded to createQueryOptimizer
 * @returns {Object} Condition evaluator instance
 */
export function createConditionEvaluator(options = {}) {
  const {
    basePath = process.cwd(),
    enableCache = true,
    cacheMaxAge = 60000, // 1 minute
    strictMode = false,
    enableOptimization = true,
    optimizationConfig = {},
  } = options;

  const resolver = createFileResolver({ basePath, enableCache, cacheMaxAge });
  const conditionCache = new Map();
  const optimizer = enableOptimization ? createQueryOptimizer(optimizationConfig) : null;

  const baseEvaluator = {
    /**
     * Validate a condition definition (delegates to the module-level helper).
     * @param {Object} condition - The condition definition
     * @returns {Object} Validation result
     */
    validateCondition(condition) {
      return validateCondition(condition);
    },

    /**
     * Evaluate a condition against a graph, consulting the cache first.
     * @param {Object} condition - The condition definition
     * @param {Store} graph - The RDF graph
     * @param {Object} [env] - Environment variables
     * @returns {Promise<any>} Condition evaluation result
     */
    async evaluate(condition, graph, env = {}) {
      const cacheKey = createCacheKey(condition, graph, env);

      if (enableCache && conditionCache.has(cacheKey)) {
        const cached = conditionCache.get(cacheKey);
        if (Date.now() - cached.timestamp < cacheMaxAge) {
          return cached.result;
        }
        // Entry exists but is stale: drop it and re-evaluate below.
        conditionCache.delete(cacheKey);
      }

      try {
        const result = await evaluateCondition(condition, graph, { basePath, env });

        if (enableCache) {
          conditionCache.set(cacheKey, {
            result,
            timestamp: Date.now(),
          });
        }

        return result;
      } catch (error) {
        if (strictMode) {
          throw error;
        }

        // Non-strict mode: fail closed with a kind-appropriate default.
        switch (condition.kind) {
          case 'sparql-ask':
            return false;
          case 'sparql-select':
            return [];
          case 'shacl':
            return { conforms: false, results: [], error: error.message };
          default:
            return false;
        }
      }
    },

    /**
     * Evaluate multiple conditions in parallel.
     * @param {Array} conditions - Array of condition definitions
     * @param {Store} graph - The RDF graph
     * @param {Object} [env] - Environment variables
     * @returns {Promise<Array>} Array of evaluation results (same order as input)
     * @throws {TypeError} If conditions is not an array
     */
    async evaluateAll(conditions, graph, env = {}) {
      if (!Array.isArray(conditions)) {
        throw new TypeError('evaluateAll: conditions must be an array');
      }

      const promises = conditions.map(condition => this.evaluate(condition, graph, env));

      return Promise.all(promises);
    },

    /**
     * Check if a condition is satisfied (for trigger evaluation).
     * Maps each kind's raw result to a boolean: ASK → truthiness,
     * SELECT → non-empty result set, SHACL → conformance.
     * @param {Object} condition - The condition definition
     * @param {Store} graph - The RDF graph
     * @param {Object} [env] - Environment variables
     * @returns {Promise<boolean>} True if condition is satisfied
     */
    async isSatisfied(condition, graph, env = {}) {
      try {
        const result = await this.evaluate(condition, graph, env);

        switch (condition.kind) {
          case 'sparql-ask':
            return Boolean(result);
          case 'sparql-select':
            return Array.isArray(result) && result.length > 0;
          case 'shacl':
            return result.conforms === true;
          default:
            return false;
        }
      } catch (error) {
        if (strictMode) {
          throw error;
        }
        return false;
      }
    },

    /**
     * Clear both the file-resolver cache and the condition result cache.
     */
    clearCache() {
      resolver.clearCache();
      conditionCache.clear();
    },

    /**
     * Get cache statistics, splitting condition-cache entries into valid
     * and expired by their age against cacheMaxAge.
     * @returns {Object} Cache statistics
     */
    getCacheStats() {
      const fileStats = resolver.getCacheStats();
      const now = Date.now();
      let validEntries = 0;
      let expiredEntries = 0;

      for (const [_key, value] of conditionCache.entries()) {
        if (now - value.timestamp < cacheMaxAge) {
          validEntries++;
        } else {
          expiredEntries++;
        }
      }

      return {
        fileCache: fileStats,
        conditionCache: {
          totalEntries: conditionCache.size,
          validEntries,
          expiredEntries,
          cacheMaxAge,
        },
      };
    },
  };

  // Mix in optimizer helpers when an optimizer was created.
  return addOptimizerMethods(baseEvaluator, optimizer);
}
|
|
330
|
-
|
|
331
|
-
/**
|
|
332
|
-
* Create a cache key for condition evaluation.
|
|
333
|
-
* @param {Object} condition - The condition definition
|
|
334
|
-
* @param {Store} graph - The RDF graph
|
|
335
|
-
* @param {Object} env - Environment variables
|
|
336
|
-
* @returns {string} Cache key
|
|
337
|
-
*/
|
|
338
|
-
/**
 * Create a cache key for condition evaluation.
 * NOTE: graph identity is approximated by quad count only, so two different
 * graphs of equal size share cache entries (original behavior, kept as-is).
 * @param {Object} condition - The condition definition
 * @param {Store} graph - The RDF graph
 * @param {Object} env - Environment variables
 * @returns {string} Cache key
 */
function createCacheKey(condition, graph, env) {
  const parts = [
    JSON.stringify({
      kind: condition.kind,
      uri: condition.ref?.uri,
      sha256: condition.ref?.sha256,
    }),
    graph.size.toString(),
    JSON.stringify(env),
  ];
  return parts.join(':');
}
|
|
350
|
-
|
|
351
|
-
/**
|
|
352
|
-
* Validate a condition definition.
|
|
353
|
-
* @param {Object} condition - The condition definition
|
|
354
|
-
* @returns {Object} Validation result
|
|
355
|
-
*/
|
|
356
|
-
/**
 * Validate a condition definition.
 *
 * A valid condition names a supported kind and provides content either as a
 * file reference (`ref.uri`; `ref.sha256` and `ref.mediaType` are
 * intentionally optional — relaxed for testing) or inline (`query` for
 * SPARQL, `shapes` for SHACL).
 *
 * @param {Object} condition - The condition definition
 * @returns {{valid: boolean, error?: string}} Validation result
 */
export function validateCondition(condition) {
  if (!condition || typeof condition !== 'object') {
    return { valid: false, error: 'Condition must be an object' };
  }

  if (!condition.kind) {
    return { valid: false, error: 'Condition must have a kind' };
  }

  const supportedKinds = [
    'sparql-ask',
    'sparql-select',
    'shacl',
    'delta',
    'threshold',
    'count',
    'window',
  ];
  if (!supportedKinds.includes(condition.kind)) {
    return {
      valid: false,
      error: `Unsupported condition kind: ${condition.kind}`,
    };
  }

  // Content may come from a file reference or be supplied inline.
  const hasRef = condition.ref && condition.ref.uri;
  const hasInline = condition.query || condition.shapes;

  if (!hasRef && !hasInline) {
    return {
      valid: false,
      error: 'Condition must have either ref (file reference) or query/shapes (inline content)',
    };
  }

  // A ref object, when present, must at least carry a uri.
  if (condition.ref && !condition.ref.uri) {
    return { valid: false, error: 'Condition ref must have a uri' };
  }

  return { valid: true };
}
|
|
419
|
-
|
|
420
|
-
/**
|
|
421
|
-
* Add optimizer methods to the condition evaluator
|
|
422
|
-
*/
|
|
423
|
-
/**
 * Add optimizer methods to a condition evaluator.
 * Returns the evaluator unchanged when no optimizer is supplied; otherwise
 * returns a shallow copy extended with optimizer passthrough helpers.
 * @param {Object} evaluator - Base evaluator instance
 * @param {Object|null} optimizer - Query optimizer (or null to skip)
 * @returns {Object} Evaluator, possibly extended with optimizer methods
 */
export function addOptimizerMethods(evaluator, optimizer) {
  if (!optimizer) {
    return evaluator;
  }

  const optimizerApi = {
    /**
     * Get optimizer statistics.
     * @returns {Object} Optimizer statistics
     */
    getOptimizerStats() {
      return optimizer.getStats();
    },

    /**
     * Create indexes for the graph.
     * @param {Store} graph - RDF graph
     * @returns {Promise<Array>} Created indexes
     */
    async createIndexes(graph) {
      return optimizer.createIndexes(graph);
    },

    /**
     * Update indexes with a delta.
     * @param {Object} delta - Delta to apply
     * @returns {Promise<void>}
     */
    async updateIndexes(delta) {
      await optimizer.updateIndexes(delta);
    },

    /**
     * Clear optimizer caches.
     */
    clearOptimizer() {
      optimizer.clear();
    },
  };

  return { ...evaluator, ...optimizerApi };
}
|
|
462
|
-
|
|
463
|
-
/**
|
|
464
|
-
* Evaluate a DELTA predicate condition
|
|
465
|
-
* @param {Object} condition - The condition definition
|
|
466
|
-
* @param {Store} graph - The RDF graph
|
|
467
|
-
* @param {Object} resolver - File resolver instance
|
|
468
|
-
* @param {Object} env - Environment variables
|
|
469
|
-
* @param {Object} options - Evaluation options
|
|
470
|
-
* @returns {Promise<boolean>} Delta condition result
|
|
471
|
-
*/
|
|
472
|
-
/**
 * Evaluate a DELTA predicate condition.
 *
 * Computes a change magnitude — either a full-change flag (1.0) when a
 * baseline fingerprint differs from the current one, or the ratio of delta
 * quads (additions + removals) to total graph quads — then compares it
 * against the spec's change type and threshold.
 *
 * @param {Object} condition - The condition definition ({ spec: { change, threshold, baseline } })
 * @param {Store} graph - The RDF graph
 * @param {Object} resolver - File resolver instance (not used in this path)
 * @param {Object} env - Environment variables (not used in this path)
 * @param {Object} options - Evaluation options; options.delta ({ additions, removals }) drives the ratio path
 * @returns {Promise<boolean>} Delta condition result
 */
async function evaluateDelta(condition, graph, resolver, env, options) {
  const { spec } = condition;
  // _key is destructured but never used below — presumably a planned grouping key; confirm.
  const { change, _key, threshold = 0.1, baseline } = spec;

  // Fingerprint the current graph state (hashStore is a sorted quad join, not a real hash).
  const currentHash = await hashStore(graph);

  // Get baseline hash if provided
  // NOTE(review): the baseline store is created empty and `baseline` is never
  // loaded into it, so baselineHash is always the fingerprint of an empty
  // store — the empty string, which is falsy, so the full-change branch below
  // never fires. Looks like baseline loading was left unimplemented; confirm.
  let baselineHash = null;
  if (baseline) {
    try {
      const baselineStore = createStore();
      // Load baseline data
      baselineHash = await hashStore(baselineStore);
    } catch (error) {
      console.warn(`Failed to load baseline: ${error.message}`);
    }
  }

  // Calculate change magnitude
  let changeMagnitude = 0;
  if (baselineHash && currentHash !== baselineHash) {
    changeMagnitude = 1.0; // Full change detected
  } else if (options.delta) {
    // Calculate change based on delta size
    const totalQuads = graph.size;
    const deltaSize =
      (options.delta.additions?.length || 0) + (options.delta.removals?.length || 0);
    changeMagnitude = totalQuads > 0 ? deltaSize / totalQuads : 0;
  }

  // Evaluate change type
  // NOTE(review): changeMagnitude is always >= 0 above, so with a positive
  // threshold the 'decrease' branch can never return true — verify intended.
  switch (change) {
    case 'any':
      return changeMagnitude > 0;
    case 'increase':
      return changeMagnitude > threshold;
    case 'decrease':
      return changeMagnitude < -threshold;
    case 'modify':
      return Math.abs(changeMagnitude) > threshold;
    default:
      return false;
  }
}
|
|
517
|
-
|
|
518
|
-
/**
|
|
519
|
-
* Evaluate a THRESHOLD predicate condition
|
|
520
|
-
* @param {Object} condition - The condition definition
|
|
521
|
-
* @param {Store} graph - The RDF graph
|
|
522
|
-
* @param {Object} resolver - File resolver instance
|
|
523
|
-
* @param {Object} env - Environment variables
|
|
524
|
-
* @param {Object} options - Evaluation options
|
|
525
|
-
* @returns {Promise<boolean>} Threshold condition result
|
|
526
|
-
*/
|
|
527
|
-
/**
 * Evaluate a THRESHOLD predicate condition.
 * Selects every binding of ?variable from the graph, aggregates the numeric
 * values (sum/avg/min/max/count; default avg; unknown aggregate falls back
 * to the first value), and compares the aggregate against `value` with `op`.
 * Numeric equality uses a 1e-4 tolerance.
 * @param {Object} condition - The condition definition ({ spec: { var, op, value, aggregate } })
 * @param {Store} graph - The RDF graph
 * @param {Object} _resolver - File resolver instance (unused)
 * @param {Object} _env - Environment variables (unused)
 * @param {Object} _options - Evaluation options (unused)
 * @returns {Promise<boolean>} Threshold condition result
 */
async function evaluateThreshold(condition, graph, _resolver, _env, _options) {
  const { spec } = condition;
  const { var: variable, op, value, aggregate = 'avg' } = spec;

  // Pull every binding of the variable out of the graph.
  const query = `
    SELECT ?${variable} WHERE {
      ?s ?p ?${variable}
    }
  `;
  const rows = await select(graph, query);
  if (rows.length === 0) {
    return false;
  }

  // Keep only bindings that parse to a finite number.
  const values = [];
  for (const row of rows) {
    const raw = row[variable]?.value;
    if (raw === undefined) {
      continue;
    }
    const num = parseFloat(raw);
    if (!isNaN(num)) {
      values.push(num);
    }
  }
  if (values.length === 0) {
    return false;
  }

  // Fold the values into a single aggregate.
  let aggregateValue;
  switch (aggregate) {
    case 'sum':
      aggregateValue = values.reduce((acc, v) => acc + v, 0);
      break;
    case 'avg':
      aggregateValue = values.reduce((acc, v) => acc + v, 0) / values.length;
      break;
    case 'min':
      aggregateValue = Math.min(...values);
      break;
    case 'max':
      aggregateValue = Math.max(...values);
      break;
    case 'count':
      aggregateValue = values.length;
      break;
    default:
      aggregateValue = values[0];
  }

  // Compare the aggregate against the configured value.
  switch (op) {
    case '>':
      return aggregateValue > value;
    case '>=':
      return aggregateValue >= value;
    case '<':
      return aggregateValue < value;
    case '<=':
      return aggregateValue <= value;
    case '==':
      return Math.abs(aggregateValue - value) < 0.0001;
    case '!=':
      return Math.abs(aggregateValue - value) >= 0.0001;
    default:
      return false;
  }
}
|
|
595
|
-
|
|
596
|
-
/**
|
|
597
|
-
* Evaluate a COUNT predicate condition
|
|
598
|
-
* @param {Object} condition - The condition definition
|
|
599
|
-
* @param {Store} graph - The RDF graph
|
|
600
|
-
* @param {Object} resolver - File resolver instance
|
|
601
|
-
* @param {Object} env - Environment variables
|
|
602
|
-
* @param {Object} options - Evaluation options
|
|
603
|
-
* @returns {Promise<boolean>} Count condition result
|
|
604
|
-
*/
|
|
605
|
-
/**
 * Evaluate a COUNT predicate condition.
 * The count is the number of rows returned by `spec.query` when one is
 * given, otherwise the total number of quads in the graph; it is then
 * compared against `spec.value` using `spec.op`.
 * @param {Object} condition - The condition definition ({ spec: { op, value, query } })
 * @param {Store} graph - The RDF graph
 * @param {Object} _resolver - File resolver instance (unused)
 * @param {Object} _env - Environment variables (unused)
 * @param {Object} _options - Evaluation options (unused)
 * @returns {Promise<boolean>} Count condition result
 */
async function evaluateCount(condition, graph, _resolver, _env, _options) {
  const { spec } = condition;
  const { op, value, query: countQuery } = spec;

  // Either count the rows of a custom query or fall back to the quad total.
  const count = countQuery ? (await select(graph, countQuery)).length : graph.size;

  if (op === '>') {
    return count > value;
  }
  if (op === '>=') {
    return count >= value;
  }
  if (op === '<') {
    return count < value;
  }
  if (op === '<=') {
    return count <= value;
  }
  if (op === '==') {
    return count === value;
  }
  if (op === '!=') {
    return count !== value;
  }
  // Unknown operator: never satisfied.
  return false;
}
|
|
638
|
-
|
|
639
|
-
/**
|
|
640
|
-
* Evaluate a WINDOW predicate condition
|
|
641
|
-
* @param {Object} condition - The condition definition
|
|
642
|
-
* @param {Store} graph - The RDF graph
|
|
643
|
-
* @param {Object} resolver - File resolver instance
|
|
644
|
-
* @param {Object} env - Environment variables
|
|
645
|
-
* @param {Object} options - Evaluation options
|
|
646
|
-
* @returns {Promise<boolean>} Window condition result
|
|
647
|
-
*/
|
|
648
|
-
/**
 * Evaluate a WINDOW predicate condition.
 * Simplified single-shot implementation: no sliding-window state is kept
 * over time. With `spec.query`, the query's first bound value per row is
 * aggregated (sum/avg/min/max; count by default) and the condition holds
 * when the aggregate is positive. Without a query, it holds when the graph
 * is non-empty.
 * @param {Object} condition - The condition definition ({ spec: { size, aggregate, query } })
 * @param {Store} graph - The RDF graph
 * @param {Object} _resolver - File resolver instance (unused)
 * @param {Object} _env - Environment variables (unused)
 * @param {Object} _options - Evaluation options (unused)
 * @returns {Promise<boolean>} Window condition result
 */
async function evaluateWindow(condition, graph, _resolver, _env, _options) {
  const { spec } = condition;
  const { size, _slide = size, aggregate, query: windowQuery } = spec;

  if (!windowQuery) {
    // Default: the "window" has data when the graph has any quads at all.
    return graph.size > 0;
  }

  const rows = await select(graph, windowQuery);

  // First bound value of a row, coerced to a number with a per-aggregate fallback.
  const toNum = (row, fallback) => {
    const num = parseFloat(Object.values(row)[0]?.value || fallback);
    return isNaN(num) ? fallback : num;
  };

  let aggregateValue;
  switch (aggregate) {
    case 'sum':
      aggregateValue = rows.reduce((acc, row) => acc + toNum(row, 0), 0);
      break;
    case 'avg': {
      const total = rows.reduce((acc, row) => acc + toNum(row, 0), 0);
      aggregateValue = rows.length > 0 ? total / rows.length : 0;
      break;
    }
    case 'min':
      aggregateValue = Math.min(...rows.map(row => toNum(row, Infinity)));
      break;
    case 'max':
      aggregateValue = Math.max(...rows.map(row => toNum(row, -Infinity)));
      break;
    case 'count':
    default:
      aggregateValue = rows.length;
  }

  // Simplified trigger rule: any positive aggregate satisfies the window.
  return aggregateValue > 0;
}
|
|
705
|
-
|
|
706
|
-
/**
|
|
707
|
-
* Hash a store for delta comparison
|
|
708
|
-
* @param {Store} store - RDF store
|
|
709
|
-
* @returns {Promise<string>} Store hash
|
|
710
|
-
*/
|
|
711
|
-
/**
 * Produce a deterministic fingerprint of a store for delta comparison.
 * Not a cryptographic hash: the result is the sorted, '|'-joined list of
 * "<subject>:<predicate>:<object>:<graph>" term values, suitable only for
 * equality checks between store states.
 * @param {Store} store - RDF store (must be iterable over quads)
 * @returns {Promise<string>} Deterministic fingerprint string
 */
async function hashStore(store) {
  const lines = [];
  for (const quad of store) {
    const parts = [quad.subject, quad.predicate, quad.object, quad.graph].map(
      term => term?.value || ''
    );
    lines.push(parts.join(':'));
  }
  lines.sort();
  return lines.join('|');
}
|