opentology 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +609 -0
- package/dist/commands/context.d.ts +29 -0
- package/dist/commands/context.js +369 -0
- package/dist/commands/delete.d.ts +2 -0
- package/dist/commands/delete.js +46 -0
- package/dist/commands/diff.d.ts +2 -0
- package/dist/commands/diff.js +43 -0
- package/dist/commands/drop.d.ts +2 -0
- package/dist/commands/drop.js +41 -0
- package/dist/commands/graph.d.ts +2 -0
- package/dist/commands/graph.js +130 -0
- package/dist/commands/infer.d.ts +2 -0
- package/dist/commands/infer.js +47 -0
- package/dist/commands/init.d.ts +2 -0
- package/dist/commands/init.js +53 -0
- package/dist/commands/mcp.d.ts +2 -0
- package/dist/commands/mcp.js +9 -0
- package/dist/commands/prefix.d.ts +2 -0
- package/dist/commands/prefix.js +73 -0
- package/dist/commands/pull.d.ts +2 -0
- package/dist/commands/pull.js +43 -0
- package/dist/commands/push.d.ts +2 -0
- package/dist/commands/push.js +79 -0
- package/dist/commands/query.d.ts +2 -0
- package/dist/commands/query.js +119 -0
- package/dist/commands/shapes.d.ts +2 -0
- package/dist/commands/shapes.js +67 -0
- package/dist/commands/status.d.ts +2 -0
- package/dist/commands/status.js +47 -0
- package/dist/commands/validate.d.ts +2 -0
- package/dist/commands/validate.js +46 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +38 -0
- package/dist/lib/codebase-scanner.d.ts +41 -0
- package/dist/lib/codebase-scanner.js +360 -0
- package/dist/lib/config.d.ts +16 -0
- package/dist/lib/config.js +70 -0
- package/dist/lib/embedded-adapter.d.ts +45 -0
- package/dist/lib/embedded-adapter.js +202 -0
- package/dist/lib/http-adapter.d.ts +41 -0
- package/dist/lib/http-adapter.js +169 -0
- package/dist/lib/oxigraph.d.ts +62 -0
- package/dist/lib/oxigraph.js +323 -0
- package/dist/lib/reasoner.d.ts +19 -0
- package/dist/lib/reasoner.js +310 -0
- package/dist/lib/shacl.d.ts +22 -0
- package/dist/lib/shacl.js +105 -0
- package/dist/lib/sparql-utils.d.ts +28 -0
- package/dist/lib/sparql-utils.js +217 -0
- package/dist/lib/store-adapter.d.ts +50 -0
- package/dist/lib/store-adapter.js +1 -0
- package/dist/lib/store-factory.d.ts +9 -0
- package/dist/lib/store-factory.js +71 -0
- package/dist/lib/validator.d.ts +10 -0
- package/dist/lib/validator.js +40 -0
- package/dist/mcp/server.d.ts +3 -0
- package/dist/mcp/server.js +1020 -0
- package/dist/templates/claude-md-context.d.ts +4 -0
- package/dist/templates/claude-md-context.js +104 -0
- package/dist/templates/otx-ontology.d.ts +2 -0
- package/dist/templates/otx-ontology.js +31 -0
- package/dist/templates/session-start-hook.d.ts +1 -0
- package/dist/templates/session-start-hook.js +94 -0
- package/dist/templates/slash-commands.d.ts +5 -0
- package/dist/templates/slash-commands.js +108 -0
- package/package.json +58 -0
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
import { Parser, Writer } from 'n3';
|
|
2
|
+
/**
 * Execute a SPARQL query against `${endpoint}/query` and return the parsed
 * `application/sparql-results+json` response body.
 *
 * @throws Error when the server responds with a non-2xx status; the error
 *         message includes the response body when it can be read.
 */
export async function sparqlQuery(endpoint, query) {
    const requestInit = {
        method: 'POST',
        headers: {
            'Content-Type': 'application/sparql-query',
            Accept: 'application/sparql-results+json',
        },
        body: query,
    };
    const res = await fetch(`${endpoint}/query`, requestInit);
    if (res.ok) {
        return res.json();
    }
    // Best-effort read of the error body; ignore read failures so the
    // original HTTP status is still reported.
    const body = await res.text().catch(() => '');
    throw new Error(`SPARQL query failed (${res.status} ${res.statusText})${body ? `: ${body}` : ''}`);
}
|
|
18
|
+
/**
 * Execute a SPARQL update against `${endpoint}/update`. Resolves with no
 * value on success.
 *
 * @throws Error when the server responds with a non-2xx status; the error
 *         message includes the response body when it can be read.
 */
export async function sparqlUpdate(endpoint, update) {
    const requestInit = {
        method: 'POST',
        headers: {
            'Content-Type': 'application/sparql-update',
        },
        body: update,
    };
    const res = await fetch(`${endpoint}/update`, requestInit);
    if (res.ok) {
        return;
    }
    // Best-effort read of the error body; ignore read failures so the
    // original HTTP status is still reported.
    const body = await res.text().catch(() => '');
    throw new Error(`SPARQL update failed (${res.status} ${res.statusText})${body ? `: ${body}` : ''}`);
}
|
|
32
|
+
/**
 * Serialize an RDF/JS term into its SPARQL textual form.
 * Supports NamedNode (`<iri>`), BlankNode (`_:id`) and Literal (quoted,
 * with optional language tag or datatype); any other term type throws.
 */
function termToSparql(term) {
    if (term.termType === 'NamedNode') {
        return `<${term.value}>`;
    }
    if (term.termType === 'BlankNode') {
        return `_:${term.value}`;
    }
    if (term.termType === 'Literal') {
        // Escape characters that would break a double-quoted SPARQL literal.
        const escaped = term.value
            .replace(/\\/g, '\\\\')
            .replace(/"/g, '\\"')
            .replace(/\n/g, '\\n')
            .replace(/\r/g, '\\r')
            .replace(/\t/g, '\\t');
        if (term.language) {
            return `"${escaped}"@${term.language}`;
        }
        // xsd:string is the default literal datatype, so it stays implicit.
        if (term.datatype && term.datatype.value !== 'http://www.w3.org/2001/XMLSchema#string') {
            return `"${escaped}"^^<${term.datatype.value}>`;
        }
        return `"${escaped}"`;
    }
    throw new Error(`Unsupported term type: ${term.termType}`);
}
|
|
57
|
+
/**
 * Parse a Turtle document into an array of RDF/JS quads.
 * Resolves when the n3 streaming parser reports the end of input (a null
 * quad) and rejects on the first syntax error.
 */
function parseTurtle(turtle) {
    return new Promise((resolve, reject) => {
        const collected = [];
        new Parser().parse(turtle, (err, quad) => {
            if (err) {
                reject(new Error(`Failed to parse Turtle: ${err.message}`));
            }
            else if (quad) {
                collected.push(quad);
            }
            else {
                // A null quad marks the end of the document.
                resolve(collected);
            }
        });
    });
}
|
|
75
|
+
/**
 * Insert every triple from a Turtle document into the named graph via a
 * SPARQL `INSERT DATA` update. A document with no triples is a no-op.
 */
export async function insertTurtle(endpoint, graphUri, turtle) {
    const quads = await parseTurtle(turtle);
    if (!quads.length) {
        return;
    }
    const lines = [];
    for (const q of quads) {
        lines.push(` ${termToSparql(q.subject)} ${termToSparql(q.predicate)} ${termToSparql(q.object)} .`);
    }
    const tripleLines = lines.join('\n');
    await sparqlUpdate(endpoint, `INSERT DATA {\n GRAPH <${graphUri}> {\n${tripleLines}\n }\n}`);
}
|
|
86
|
+
/**
 * Count the triples stored in the given named graph.
 * Returns 0 when the result set carries no `count` binding (e.g. the
 * endpoint returned an empty result).
 */
export async function getGraphTripleCount(endpoint, graphUri) {
    const results = await sparqlQuery(endpoint, `SELECT (COUNT(*) AS ?count) WHERE { GRAPH <${graphUri}> { ?s ?p ?o } }`);
    const first = results.results.bindings[0];
    const cell = first ? first['count'] : undefined;
    return cell ? parseInt(cell.value, 10) : 0;
}
|
|
95
|
+
/**
 * Returns true if the query already manages its own graph scoping.
 * Matches GRAPH, FROM NAMED, or FROM < (case-insensitive).
 */
export function hasGraphScope(sparql) {
    const scopeMarkers = [/\bGRAPH\b/i, /\bFROM\s+NAMED\b/i, /\bFROM\s*</i];
    return scopeMarkers.some((pattern) => pattern.test(sparql));
}
|
|
102
|
+
/**
 * Wraps the WHERE body of a SPARQL SELECT/CONSTRUCT/ASK/DESCRIBE query so
 * that all triple patterns are scoped to `graphUri`.
 *
 * Handles:
 *   SELECT ... WHERE { ... }   → standard form
 *   SELECT ... { ... }         → shorthand (no WHERE keyword)
 *
 * Returns null if the outermost `{ ... }` block cannot be located safely.
 */
export function autoScopeQuery(sparql, graphUri) {
    // Locate the `{` that opens the query body: prefer an explicit WHERE
    // clause, falling back to the first brace for the shorthand form.
    const whereMatch = sparql.match(/\bWHERE\s*\{/i);
    let openIdx;
    if (whereMatch && whereMatch.index !== undefined) {
        // The `{` is the last character of the WHERE match.
        openIdx = whereMatch.index + whereMatch[0].length - 1;
    }
    else {
        openIdx = sparql.indexOf('{');
        if (openIdx === -1) {
            return null;
        }
    }
    // Scan forward for the matching close brace, tracking nesting depth.
    let closeIdx = -1;
    for (let i = openIdx, depth = 0; i < sparql.length; i++) {
        const ch = sparql[i];
        if (ch === '{') {
            depth += 1;
        }
        else if (ch === '}' && --depth === 0) {
            closeIdx = i;
            break;
        }
    }
    if (closeIdx === -1) {
        return null;
    }
    const head = sparql.slice(0, openIdx + 1); // up to and including `{`
    const body = sparql.slice(openIdx + 1, closeIdx); // content between braces
    const tail = sparql.slice(closeIdx); // from `}` onwards
    return `${head} GRAPH <${graphUri}> {${body}} ${tail}`;
}
|
|
149
|
+
/**
 * Namespaces with conventional community prefixes, keyed by namespace IRI.
 * Consulted by extractPrefixes() before it falls back to deriving a prefix
 * from the URI's path structure.
 */
const WELL_KNOWN_PREFIXES = {
    'http://www.w3.org/1999/02/22-rdf-syntax-ns#': 'rdf',
    'http://www.w3.org/2000/01/rdf-schema#': 'rdfs',
    'http://www.w3.org/2002/07/owl#': 'owl',
    'http://schema.org/': 'schema',
    'http://xmlns.com/foaf/0.1/': 'foaf',
    'http://www.w3.org/2001/XMLSchema#': 'xsd',
    'http://purl.org/dc/elements/1.1/': 'dc',
    'http://purl.org/dc/terms/': 'dcterms',
    'http://www.w3.org/2004/02/skos/core#': 'skos',
    'http://www.w3.org/ns/prov#': 'prov',
};
|
|
161
|
+
/**
 * Build a prefix → namespace map covering a set of URIs.
 *
 * Well-known vocabularies (rdf, rdfs, owl, …) get their conventional
 * prefix. For anything else a short prefix is derived from the last path
 * segment of the namespace, splitting the URI at the final `#` when
 * present, otherwise at the final `/`.
 */
function extractPrefixes(uris) {
    const prefixes = {};
    // Derive a short, lowercase, alphanumeric prefix from the last path
    // segment of `ns` and register it unless that name is already taken.
    const registerDerived = (ns, trimPattern) => {
        if (Object.values(prefixes).includes(ns)) {
            return;
        }
        const pathSegments = ns.replace(trimPattern, '').split('/').filter(Boolean);
        const candidate = pathSegments[pathSegments.length - 1]
            ?.toLowerCase()
            .replace(/[^a-z0-9]/g, '')
            .slice(0, 8);
        if (candidate && !(candidate in prefixes)) {
            prefixes[candidate] = ns;
        }
    };
    for (const uri of uris) {
        // Check well-known prefixes first.
        for (const [ns, prefix] of Object.entries(WELL_KNOWN_PREFIXES)) {
            // Bug fix: the original tested `prefix in Object.values(prefixes)`,
            // which checks array *indices* (0, 1, …), never the registered
            // prefix names. Check the prefix map's keys instead.
            if (uri.startsWith(ns) && !(prefix in prefixes)) {
                prefixes[prefix] = ns;
                break;
            }
        }
        // Already covered by a registered namespace? Skip derivation.
        if (Object.values(prefixes).some((ns) => uri.startsWith(ns))) {
            continue;
        }
        // Prefer hash-based namespaces (e.g. http://example.org/ontology#),
        // then slash-based ones (e.g. http://example.org/ontology/).
        const hashIdx = uri.lastIndexOf('#');
        if (hashIdx !== -1) {
            registerDerived(uri.slice(0, hashIdx + 1), /#$/);
        }
        else {
            const slashIdx = uri.lastIndexOf('/');
            if (slashIdx !== -1) {
                registerDerived(uri.slice(0, slashIdx + 1), /\/$/);
            }
        }
    }
    return prefixes;
}
|
|
210
|
+
/** Remove the named graph and all its triples; silent if it does not exist. */
export async function dropGraph(endpoint, graphUri) {
    const update = `DROP SILENT GRAPH <${graphUri}>`;
    await sparqlUpdate(endpoint, update);
}
|
|
213
|
+
/**
 * Delete triples from a named graph. Exactly one selection mode applies,
 * with `options.turtle` taking precedence:
 *   - `options.turtle`: remove the listed triples via DELETE DATA.
 *   - `options.where`:  remove every ?s ?p ?o matching the extra pattern.
 * @throws Error when neither option is supplied.
 */
export async function deleteTriples(endpoint, graphUri, options) {
    if (options.turtle !== undefined) {
        const quads = await parseTurtle(options.turtle);
        if (!quads.length) {
            return;
        }
        const tripleLines = quads
            .map((q) => ` ${termToSparql(q.subject)} ${termToSparql(q.predicate)} ${termToSparql(q.object)} .`)
            .join('\n');
        await sparqlUpdate(endpoint, `DELETE DATA {\n GRAPH <${graphUri}> {\n${tripleLines}\n }\n}`);
        return;
    }
    if (options.where !== undefined) {
        await sparqlUpdate(endpoint, `DELETE { GRAPH <${graphUri}> { ?s ?p ?o } } WHERE { GRAPH <${graphUri}> { ?s ?p ?o . ${options.where} } }`);
        return;
    }
    throw new Error('deleteTriples: either options.turtle or options.where must be provided');
}
|
|
233
|
+
/**
 * Summarize a named graph: its triple count, the distinct classes that
 * have instances, the distinct predicates in use, and a prefix map derived
 * from those URIs.
 */
export async function getSchemaOverview(endpoint, graphUri) {
    // The three probes are independent, so run them in parallel instead of
    // awaiting each round-trip sequentially.
    const [tripleCount, classResults, propResults] = await Promise.all([
        getGraphTripleCount(endpoint, graphUri),
        sparqlQuery(endpoint, `SELECT DISTINCT ?class WHERE { GRAPH <${graphUri}> { ?s a ?class } } ORDER BY ?class`),
        sparqlQuery(endpoint, `SELECT DISTINCT ?prop WHERE { GRAPH <${graphUri}> { ?s ?prop ?o } } ORDER BY ?prop`),
    ]);
    const classes = classResults.results.bindings
        .map((b) => b['class']?.value)
        .filter((v) => v !== undefined);
    const properties = propResults.results.bindings
        .map((b) => b['prop']?.value)
        .filter((v) => v !== undefined);
    const prefixes = extractPrefixes([...classes, ...properties]);
    return { prefixes, classes, properties, tripleCount };
}
|
|
246
|
+
/**
 * Describe one class in a named graph: how many instances it has, which
 * properties those instances use (most frequent first), and up to five
 * sample triples.
 */
export async function getClassDetails(endpoint, graphUri, classUri) {
    // All three queries are independent, so issue them in parallel rather
    // than awaiting each round-trip in sequence.
    const [countResults, propResults, sampleResults] = await Promise.all([
        sparqlQuery(endpoint, `SELECT (COUNT(?s) as ?count) WHERE { GRAPH <${graphUri}> { ?s a <${classUri}> } }`),
        sparqlQuery(endpoint, `SELECT ?prop (COUNT(?prop) as ?count) WHERE { GRAPH <${graphUri}> { ?s a <${classUri}> . ?s ?prop ?o } } GROUP BY ?prop ORDER BY DESC(?count)`),
        sparqlQuery(endpoint, `SELECT ?s ?p ?o WHERE { GRAPH <${graphUri}> { ?s a <${classUri}> . ?s ?p ?o } } LIMIT 5`),
    ]);
    const countBinding = countResults.results.bindings[0];
    const instanceCount = countBinding?.['count']
        ? parseInt(countBinding['count'].value, 10)
        : 0;
    const properties = propResults.results.bindings
        .filter((b) => b['prop'] && b['count'])
        .map((b) => ({
            property: b['prop'].value,
            count: parseInt(b['count'].value, 10),
        }));
    const sampleTriples = sampleResults.results.bindings
        .filter((b) => b['s'] && b['p'] && b['o'])
        .map((b) => ({
            s: b['s'].value,
            p: b['p'].value,
            o: b['o'].value,
        }));
    return { classUri, instanceCount, properties, sampleTriples };
}
|
|
269
|
+
/**
 * Diff a local Turtle document against the remote contents of a named
 * graph. Returns the triples only present locally (`added`), only present
 * remotely (`removed`), and the count present in both (`unchanged`).
 * Triples are compared by their canonical SPARQL text.
 */
export async function diffGraph(endpoint, graphUri, localTurtle) {
    const canonical = (q) => `${termToSparql(q.subject)} ${termToSparql(q.predicate)} ${termToSparql(q.object)}`;
    const localQuads = await parseTurtle(localTurtle);
    const localSet = new Set(localQuads.map(canonical));
    // Fetch the remote graph as Turtle via a CONSTRUCT query.
    const query = `CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <${graphUri}> { ?s ?p ?o } }`;
    const response = await fetch(`${endpoint}/query`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/sparql-query',
            Accept: 'text/turtle',
        },
        body: query,
    });
    if (!response.ok) {
        const body = await response.text().catch(() => '');
        throw new Error(`SPARQL CONSTRUCT query failed (${response.status} ${response.statusText})${body ? `: ${body}` : ''}`);
    }
    const remoteTurtle = await response.text();
    const remoteQuads = remoteTurtle.trim() ? await parseTurtle(remoteTurtle) : [];
    const remoteSet = new Set(remoteQuads.map(canonical));
    const added = [];
    for (const triple of localSet) {
        if (!remoteSet.has(triple)) {
            added.push(triple);
        }
    }
    const removed = [];
    for (const triple of remoteSet) {
        if (!localSet.has(triple)) {
            removed.push(triple);
        }
    }
    // Everything local that was not "added" must exist remotely too.
    const unchanged = localSet.size - added.length;
    return { added, removed, unchanged };
}
|
|
294
|
+
/**
 * Export every triple in a named graph as Turtle, re-serialized through
 * n3.Writer so the output formatting is consistent regardless of how the
 * endpoint serializes CONSTRUCT results.
 */
export async function exportGraph(endpoint, graphUri) {
    const response = await fetch(`${endpoint}/query`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/sparql-query',
            Accept: 'text/turtle',
        },
        body: `CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <${graphUri}> { ?s ?p ?o } }`,
    });
    if (!response.ok) {
        const body = await response.text().catch(() => '');
        throw new Error(`SPARQL CONSTRUCT query failed (${response.status} ${response.statusText})${body ? `: ${body}` : ''}`);
    }
    const turtleText = await response.text();
    const quads = await parseTurtle(turtleText);
    return new Promise((resolve, reject) => {
        const writer = new Writer({ format: 'Turtle' });
        writer.addQuads(quads);
        writer.end((err, result) => {
            if (err) {
                reject(new Error(`Failed to serialize Turtle: ${err.message}`));
                return;
            }
            resolve(result);
        });
    });
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { StoreAdapter } from './store-adapter.js';
|
|
2
|
+
/** A single RDF triple in the reasoner's simplified string form. */
interface Triple {
    /** Subject URI. */
    s: string;
    /** Predicate URI. */
    p: string;
    /** Object URI, or the literal's lexical value when `isLiteral` is true. */
    o: string;
    /** True when the object is a literal rather than a URI. */
    isLiteral: boolean;
}
/** Summary returned by materializeInferences(). */
export interface InferenceResult {
    /** Number of triples asserted in the graph before inference. */
    assertedCount: number;
    /** Number of newly inferred triples that were materialized. */
    inferredCount: number;
    /** Count of inferred triples keyed by rule label (e.g. "rdfs9"). */
    rules: Record<string, number>;
}
/**
 * Compute RDFS inferences for a set of triples. Pure: does not touch any
 * store.
 */
export declare function computeInferences(triples: Triple[]): {
    inferred: Triple[];
    rules: Record<string, number>;
};
/**
 * Remove previously materialized inferences from the main graph and drop
 * the bookkeeping inference graph.
 */
export declare function clearInferences(adapter: StoreAdapter, graphUri: string): Promise<void>;
/**
 * Recompute RDFS inferences for the graph and insert them into both the
 * main graph and its bookkeeping inference graph.
 */
export declare function materializeInferences(adapter: StoreAdapter, graphUri: string): Promise<InferenceResult>;
export {};
|
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
import { Parser, Writer, DataFactory } from 'n3';
|
|
2
|
+
import { getInferenceGraphUri } from './sparql-utils.js';
|
|
3
|
+
const { namedNode } = DataFactory;
// ── Constants ──────────────────────────────────────────────────────────
// Core RDF/RDFS vocabulary URIs matched against triple predicates by the
// inference rules below.
const RDF_TYPE = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type';
const RDFS_SUBCLASS_OF = 'http://www.w3.org/2000/01/rdf-schema#subClassOf';
const RDFS_SUBPROPERTY_OF = 'http://www.w3.org/2000/01/rdf-schema#subPropertyOf';
const RDFS_DOMAIN = 'http://www.w3.org/2000/01/rdf-schema#domain';
const RDFS_RANGE = 'http://www.w3.org/2000/01/rdf-schema#range';
/** URIs in the RDF/RDFS/OWL meta-vocabulary — skip meta-level reasoning. */
const META_NAMESPACES = [
    'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
    'http://www.w3.org/2000/01/rdf-schema#',
    'http://www.w3.org/2002/07/owl#',
];
|
|
16
|
+
/** True when the URI belongs to the RDF/RDFS/OWL meta-vocabulary. */
function isMeta(uri) {
    for (const ns of META_NAMESPACES) {
        if (uri.startsWith(ns)) {
            return true;
        }
    }
    return false;
}
|
|
19
|
+
// ── Helpers ────────────────────────────────────────────────────────────
|
|
20
|
+
/** Canonical tab-separated key for a triple, used for set membership tests. */
function tripleKey(t) {
    return [t.s, t.p, t.o, t.isLiteral].join('\t');
}
|
|
23
|
+
/**
 * Compute the transitive closure of a binary relation expressed as triples
 * with a specific predicate. Returns all pairs (a, c) such that a R* c.
 *
 * Fix: the previous implementation memoized reachability sets across nodes
 * and consulted caches that were still being filled, so cycles of length
 * three or more produced incomplete closures (e.g. for a→b→c→a the pair
 * (b, b) was missed). This version walks each source node independently,
 * which is correct for cyclic graphs.
 */
function transitiveClosure(pairs) {
    // Adjacency map: node → set of direct successors; also collect every
    // node that appears on either side of a pair.
    const children = new Map();
    const allNodes = new Set();
    for (const [a, b] of pairs) {
        let set = children.get(a);
        if (!set) {
            set = new Set();
            children.set(a, set);
        }
        set.add(b);
        allNodes.add(a);
        allNodes.add(b);
    }
    const result = [];
    for (const node of allNodes) {
        // Iterative DFS from `node`; `reach` doubles as the visited set, so
        // cycles terminate naturally.
        const reach = new Set();
        const stack = [...(children.get(node) ?? [])];
        while (stack.length > 0) {
            const current = stack.pop();
            if (reach.has(current)) {
                continue;
            }
            reach.add(current);
            const next = children.get(current);
            if (next) {
                for (const succ of next) {
                    if (!reach.has(succ)) {
                        stack.push(succ);
                    }
                }
            }
        }
        for (const target of reach) {
            result.push([node, target]);
        }
    }
    return result;
}
|
|
70
|
+
// ── Pure inference engine ──────────────────────────────────────────────
|
|
71
|
+
/**
 * Apply a fixed set of RDFS inference rules (rdfs2, rdfs3, rdfs7, rdfs9,
 * rdfs11, plus subPropertyOf transitivity) to `triples` and return the
 * newly inferred triples along with per-rule counts. Pure function: it
 * never touches a store and never mutates its input.
 *
 * NOTE(review): keys are built with tab separators (see tripleKey); a
 * literal value containing a tab would round-trip incorrectly through the
 * key.split('\t') in the result-collection step — confirm whether such
 * literals can occur here.
 */
export function computeInferences(triples) {
    // Set of asserted triple keys — inferences duplicating these are skipped.
    const assertedSet = new Set(triples.map(tripleKey));
    const inferredMap = new Map(); // tripleKey → rule name
    const rules = {};
    // Record an inferred triple unless it is already asserted or inferred.
    function addInferred(t, rule) {
        const key = tripleKey(t);
        if (assertedSet.has(key) || inferredMap.has(key))
            return;
        inferredMap.set(key, rule);
        rules[rule] = (rules[rule] ?? 0) + 1;
    }
    // ── Extract schema information ─────────────────────────────────────
    // Direct subClassOf pairs
    const subClassPairs = [];
    // Direct subPropertyOf pairs
    const subPropertyPairs = [];
    // Domain declarations: property → class
    const domainMap = new Map();
    // Range declarations: property → class
    const rangeMap = new Map();
    // Instance-of triples: subject → set of classes
    const instanceOf = [];
    // All property-usage triples (non-rdf:type, non-schema predicates)
    const propertyUsages = [];
    // Single pass partitioning every input triple into exactly one bucket.
    for (const t of triples) {
        if (t.p === RDFS_SUBCLASS_OF && !t.isLiteral) {
            subClassPairs.push([t.s, t.o]);
        }
        else if (t.p === RDFS_SUBPROPERTY_OF && !t.isLiteral) {
            subPropertyPairs.push([t.s, t.o]);
        }
        else if (t.p === RDFS_DOMAIN && !t.isLiteral) {
            const existing = domainMap.get(t.s);
            if (existing)
                existing.push(t.o);
            else
                domainMap.set(t.s, [t.o]);
        }
        else if (t.p === RDFS_RANGE && !t.isLiteral) {
            const existing = rangeMap.get(t.s);
            if (existing)
                existing.push(t.o);
            else
                rangeMap.set(t.s, [t.o]);
        }
        else if (t.p === RDF_TYPE && !t.isLiteral) {
            instanceOf.push({ s: t.s, c: t.o });
        }
        else {
            propertyUsages.push(t);
        }
    }
    // ── rdfs11: subClassOf transitivity ────────────────────────────────
    const allSubClassPairs = transitiveClosure(subClassPairs);
    const subClassDirect = new Set(subClassPairs.map(([a, b]) => `${a}\t${b}`));
    for (const [a, c] of allSubClassPairs) {
        // Only record pairs that were not directly asserted.
        if (!subClassDirect.has(`${a}\t${c}`)) {
            addInferred({ s: a, p: RDFS_SUBCLASS_OF, o: c, isLiteral: false }, 'rdfs11');
        }
    }
    // Build complete superclass map (including transitive)
    const superClasses = new Map();
    for (const [sub, sup] of allSubClassPairs) {
        let set = superClasses.get(sub);
        if (!set) {
            set = new Set();
            superClasses.set(sub, set);
        }
        set.add(sup);
    }
    // ── subPropertyOf transitivity (feeds rdfs7) ──────────────────────
    // NOTE(review): in the RDFS spec, subPropertyOf transitivity is rule
    // rdfs5, not rdfs7 — the label below conflates it with the propagation
    // rule further down. Changing it would alter the keys of the reported
    // `rules` record, so it is flagged rather than changed here.
    const allSubPropertyPairs = transitiveClosure(subPropertyPairs);
    const subPropDirect = new Set(subPropertyPairs.map(([a, b]) => `${a}\t${b}`));
    for (const [a, c] of allSubPropertyPairs) {
        if (!subPropDirect.has(`${a}\t${c}`)) {
            addInferred({ s: a, p: RDFS_SUBPROPERTY_OF, o: c, isLiteral: false }, 'rdfs7');
        }
    }
    // Build super-property map
    const superProperties = new Map();
    for (const [sub, sup] of allSubPropertyPairs) {
        let set = superProperties.get(sub);
        if (!set) {
            set = new Set();
            superProperties.set(sub, set);
        }
        set.add(sup);
    }
    // ── rdfs9: subClassOf instance propagation ─────────────────────────
    // x a C, C subClassOf* D ⇒ x a D (skipping meta-vocabulary subjects).
    for (const { s, c } of instanceOf) {
        if (isMeta(s))
            continue;
        const supers = superClasses.get(c);
        if (!supers)
            continue;
        for (const superClass of supers) {
            addInferred({ s, p: RDF_TYPE, o: superClass, isLiteral: false }, 'rdfs9');
        }
    }
    // ── rdfs7: subPropertyOf propagation ───────────────────────────────
    // x p y, p subPropertyOf* q ⇒ x q y (object may be a literal).
    for (const t of propertyUsages) {
        if (isMeta(t.s))
            continue;
        const supers = superProperties.get(t.p);
        if (!supers)
            continue;
        for (const superProp of supers) {
            addInferred({ s: t.s, p: superProp, o: t.o, isLiteral: t.isLiteral }, 'rdfs7');
        }
    }
    // ── rdfs2: domain inference ────────────────────────────────────────
    // x p y, p domain C ⇒ x a C (and x a C's superclasses, counted as rdfs9).
    for (const t of propertyUsages) {
        if (isMeta(t.s))
            continue;
        const domains = domainMap.get(t.p);
        if (!domains)
            continue;
        for (const domainClass of domains) {
            addInferred({ s: t.s, p: RDF_TYPE, o: domainClass, isLiteral: false }, 'rdfs2');
            // Also propagate superclasses of the domain class (rdfs9 follow-up)
            const supers = superClasses.get(domainClass);
            if (supers) {
                for (const superClass of supers) {
                    addInferred({ s: t.s, p: RDF_TYPE, o: superClass, isLiteral: false }, 'rdfs9');
                }
            }
        }
    }
    // ── rdfs3: range inference ─────────────────────────────────────────
    // x p y, p range C ⇒ y a C — only for URI objects (literals skipped).
    for (const t of propertyUsages) {
        if (t.isLiteral || isMeta(t.o))
            continue;
        const ranges = rangeMap.get(t.p);
        if (!ranges)
            continue;
        for (const rangeClass of ranges) {
            addInferred({ s: t.o, p: RDF_TYPE, o: rangeClass, isLiteral: false }, 'rdfs3');
            // Also propagate superclasses of the range class (rdfs9 follow-up)
            const supers = superClasses.get(rangeClass);
            if (supers) {
                for (const superClass of supers) {
                    addInferred({ s: t.o, p: RDF_TYPE, o: superClass, isLiteral: false }, 'rdfs9');
                }
            }
        }
    }
    // ── Collect results ────────────────────────────────────────────────
    // Rebuild Triple records from the tab-separated keys (see NOTE above).
    const inferred = [];
    for (const [key] of inferredMap) {
        const parts = key.split('\t');
        inferred.push({
            s: parts[0],
            p: parts[1],
            o: parts[2],
            isLiteral: parts[3] === 'true',
        });
    }
    return { inferred, rules };
}
|
|
230
|
+
// ── Turtle serialization helpers ───────────────────────────────────────
|
|
231
|
+
/**
 * Serialize reasoner triples to Turtle via n3.Writer.
 *
 * Fix: the previous version always wrapped the object in namedNode(), so a
 * triple whose `isLiteral` flag was set (possible via rdfs7 sub-property
 * propagation of a literal-valued statement, see computeInferences) was
 * emitted as an IRI. Literal objects are now serialized with
 * DataFactory.literal().
 */
function triplesToTurtle(triples) {
    return new Promise((resolve, reject) => {
        const writer = new Writer({ format: 'Turtle' });
        for (const t of triples) {
            const subject = namedNode(t.s);
            const predicate = namedNode(t.p);
            // Respect the isLiteral flag instead of assuming a URI object.
            const object = t.isLiteral ? DataFactory.literal(t.o) : namedNode(t.o);
            writer.addQuad(subject, predicate, object);
        }
        writer.end((err, result) => {
            if (err)
                reject(new Error(`Failed to serialize inferred triples: ${err.message}`));
            else
                resolve(result);
        });
    });
}
|
|
251
|
+
// ── Turtle parsing ─────────────────────────────────────────────────────
|
|
252
|
+
/**
 * Parse a Turtle document into the reasoner's simplified Triple records
 * ({ s, p, o, isLiteral }). Rejects on the first syntax error.
 */
function parseTurtleToTriples(turtle) {
    return new Promise((resolve, reject) => {
        const triples = [];
        new Parser().parse(turtle, (err, quad) => {
            if (err) {
                reject(new Error(`Failed to parse Turtle: ${err.message}`));
            }
            else if (quad) {
                triples.push({
                    s: quad.subject.value,
                    p: quad.predicate.value,
                    o: quad.object.value,
                    isLiteral: quad.object.termType === 'Literal',
                });
            }
            else {
                // The parser signals completion with a null quad.
                resolve(triples);
            }
        });
    });
}
|
|
275
|
+
// ── Public API ─────────────────────────────────────────────────────────
|
|
276
|
+
/**
 * Undo a previous materialization: delete the inferred triples from the
 * main graph (using the bookkeeping copy as the deletion list), then drop
 * the bookkeeping graph itself.
 */
export async function clearInferences(adapter, graphUri) {
    const inferenceGraph = getInferenceGraphUri(graphUri);
    const inferredTurtle = await adapter.exportGraph(inferenceGraph);
    if (inferredTurtle.trim().length > 0) {
        await adapter.deleteTriples(graphUri, { turtle: inferredTurtle });
    }
    await adapter.sparqlUpdate(`DROP SILENT GRAPH <${inferenceGraph}>`);
}
|
|
286
|
+
/**
 * Recompute RDFS inferences for a graph and write them back.
 *
 * Inferred triples go into the MAIN graph (so plain queries see them
 * without cross-graph joins) and are mirrored into a bookkeeping graph so
 * clearInferences()/status can account for them later.
 *
 * @returns counts of asserted and inferred triples plus per-rule totals.
 */
export async function materializeInferences(adapter, graphUri) {
    // Start from a clean slate: remove anything materialized previously.
    await clearInferences(adapter, graphUri);
    // Pull the asserted graph and parse it into simple triples.
    const turtle = await adapter.exportGraph(graphUri);
    const triples = turtle.trim() ? await parseTurtleToTriples(turtle) : [];
    // Pure computation — no store access.
    const { inferred, rules } = computeInferences(triples);
    if (inferred.length > 0) {
        const inferredTurtle = await triplesToTurtle(inferred);
        await adapter.insertTurtle(graphUri, inferredTurtle);
        await adapter.insertTurtle(getInferenceGraphUri(graphUri), inferredTurtle);
    }
    return {
        assertedCount: triples.length,
        inferredCount: inferred.length,
        rules,
    };
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/** One constraint violation from a SHACL validation run. */
export interface ShaclViolation {
    /** Node that failed validation. */
    focusNode: string;
    /** Property path the constraint applies to, or null when none applies. */
    path: string | null;
    /** Human-readable violation message. */
    message: string;
    /** Severity label as reported by the validator. */
    severity: string;
}
/** Overall outcome of validating data against a set of shapes. */
export interface ShaclReport {
    /** True when no violations were found. */
    conforms: boolean;
    /** Individual violations; empty when `conforms` is true. */
    violations: ShaclViolation[];
}
/**
 * Discover .ttl shape files in the given directory (defaults to shapes/ in cwd).
 */
export declare function discoverShapes(dir?: string): string[];
/**
 * Check whether the shapes directory exists and contains at least one .ttl file.
 */
export declare function hasShapes(shapesDir?: string): boolean;
/**
 * Validate a Turtle data string against the given SHACL shape files.
 */
export declare function validateWithShacl(dataTurtle: string, shapePaths: string[]): Promise<ShaclReport>;
|