eyeling 1.22.16 → 1.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/HANDBOOK.md +93 -17
- package/dist/browser/eyeling.browser.js +248 -34
- package/eyeling.js +255 -34
- package/index.d.ts +17 -5
- package/index.js +29 -8
- package/lib/cli.js +44 -32
- package/lib/engine.js +4 -2
- package/lib/multisource.js +198 -0
- package/package.json +1 -1
- package/test/api.test.js +101 -0
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Eyeling Reasoner — multi-source parsing helpers
|
|
3
|
+
*
|
|
4
|
+
* These helpers let the CLI/API parse several N3 documents independently and
|
|
5
|
+
* merge their parsed ASTs before reasoning. This avoids building one giant N3
|
|
6
|
+
* string while preserving the existing lexer/parser/engine pipeline.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
'use strict';
|
|
10
|
+
|
|
11
|
+
const { lex } = require('./lexer');
|
|
12
|
+
const { Parser } = require('./parser');
|
|
13
|
+
const {
|
|
14
|
+
Blank,
|
|
15
|
+
ListTerm,
|
|
16
|
+
OpenListTerm,
|
|
17
|
+
GraphTerm,
|
|
18
|
+
Triple,
|
|
19
|
+
Rule,
|
|
20
|
+
PrefixEnv,
|
|
21
|
+
annotateQuotedGraphTerm,
|
|
22
|
+
} = require('./prelude');
|
|
23
|
+
|
|
24
|
+
/**
 * Build a fresh, empty parsed-document record.
 *
 * The shape matches what `parseN3Text` returns for an actual document:
 * a default prefix environment plus empty collections for ground triples,
 * forward rules, backward rules, and log:query rules.
 *
 * @returns {{prefixes: object, triples: Array, frules: Array, brules: Array, logQueryRules: Array}}
 */
function emptyParsedDocument() {
  const doc = {
    prefixes: PrefixEnv.newDefault(),
    triples: [],
    frules: [],
    brules: [],
    logQueryRules: [],
  };
  return doc;
}
|
|
33
|
+
|
|
34
|
+
/**
 * Lex and parse one N3 document string into its AST pieces.
 *
 * @param {string} text - N3 source text.
 * @param {object} [opts] - Options bag; tolerates null.
 * @param {string} [opts.baseIri=''] - Base IRI applied to the parser before parsing.
 * @param {string} [opts.label='<input>'] - Human-readable label for this source.
 * @returns {object} Parsed document: prefixes, triples, frules, brules,
 *   logQueryRules, plus the raw tokens, original text, and label.
 */
function parseN3Text(text, opts = {}) {
  const { baseIri = '', label = '<input>' } = opts || {};

  const tokens = lex(text);
  const parser = new Parser(tokens);

  // An empty base IRI means "leave the parser's default base alone".
  if (baseIri) {
    parser.prefixes.setBase(baseIri);
  }

  const [prefixes, triples, frules, brules, logQueryRules] = parser.parseDocument();

  return {
    prefixes,
    triples,
    frules,
    brules,
    logQueryRules,
    tokens,
    text,
    label,
  };
}
|
|
42
|
+
|
|
43
|
+
/**
 * Blank-node label prefix that namespaces labels to one source document.
 *
 * @param {number} sourceIndex - 1-based index of the source.
 * @returns {string} Prefix such as "_:src2_" for source 2.
 */
function sourceBlankPrefix(sourceIndex) {
  const parts = ['_:src', sourceIndex, '_'];
  return parts.join('');
}
|
|
46
|
+
|
|
47
|
+
/**
 * Rename one blank-node label so it is unique to a given source.
 *
 * Results are memoised in `mapping` so the same input label always maps to
 * the same scoped label within a single source document.
 *
 * @param {string} label - Original blank label (with or without "_:" prefix).
 * @param {number} sourceIndex - 1-based source index used for namespacing.
 * @param {Map<string,string>} mapping - Per-source memo of label renames.
 * @returns {string} Scoped label, e.g. "_:src1_b0".
 */
function scopedBlankLabel(label, sourceIndex, mapping) {
  const key = String(label || '');

  const cached = mapping.get(key);
  if (cached) return cached;

  // Strip a leading "_:" so the scoped prefix is not doubled up.
  const bare = key.startsWith('_:') ? key.slice(2) : key;
  const scoped = sourceBlankPrefix(sourceIndex) + bare;

  mapping.set(key, scoped);
  return scoped;
}
|
|
57
|
+
|
|
58
|
+
/**
 * Deep-clone a parsed document, renaming every blank-node label so it is
 * scoped to `sourceIndex`. This prevents accidental blank-node label
 * collisions when several independently parsed documents are merged.
 *
 * Non-Blank leaf terms are shared (returned as-is), not copied; only
 * containers (lists, graphs, triples, rules) and Blank nodes are rebuilt.
 *
 * @param {object} doc - Parsed document (see parseN3Text's return shape).
 * @param {number} sourceIndex - 1-based index of the source document.
 * @returns {object} New document with scoped blank nodes; prefixes, tokens,
 *   text, and label are carried over unchanged.
 */
function scopeBlankNodesInDocument(doc, sourceIndex) {
  // One rename memo per document: identical labels within this source stay
  // identical after scoping.
  const mapping = new Map();

  // Recursively rebuild a term, renaming Blank labels and descending into
  // list/graph containers. Anything else (IRIs, literals, vars) is shared.
  function cloneTerm(term) {
    if (term instanceof Blank) return new Blank(scopedBlankLabel(term.label, sourceIndex, mapping));
    if (term instanceof ListTerm) return new ListTerm(term.elems.map(cloneTerm));
    if (term instanceof OpenListTerm) return new OpenListTerm(term.prefix.map(cloneTerm), term.tailVar);
    // Re-annotate the cloned graph so it keeps its quoted-graph marker.
    if (term instanceof GraphTerm) return annotateQuotedGraphTerm(new GraphTerm(term.triples.map(cloneTriple)));
    return term;
  }

  function cloneTriple(triple) {
    return new Triple(cloneTerm(triple.s), cloneTerm(triple.p), cloneTerm(triple.o));
  }

  // Rebuild a rule, scoping blank labels in its premise/conclusion triples
  // and in its recorded head-blank label set.
  function cloneRule(rule) {
    const headBlankLabels = new Set();
    if (rule && rule.headBlankLabels instanceof Set) {
      for (const label of rule.headBlankLabels) headBlankLabels.add(scopedBlankLabel(label, sourceIndex, mapping));
    }

    const out = new Rule(
      (rule.premise || []).map(cloneTriple),
      (rule.conclusion || []).map(cloneTriple),
      rule.isForward,
      rule.isFuse,
      headBlankLabels,
    );

    // Carry over the hidden dynamic-conclusion term, preserving its
    // non-enumerable property descriptor so it stays out of serialisation.
    if (rule && Object.prototype.hasOwnProperty.call(rule, '__dynamicConclusionTerm')) {
      Object.defineProperty(out, '__dynamicConclusionTerm', {
        value: cloneTerm(rule.__dynamicConclusionTerm),
        enumerable: false,
        writable: false,
        configurable: true,
      });
    }

    return out;
  }

  return {
    // Prefixes are shared, not cloned: scoping only touches blank labels.
    prefixes: doc.prefixes,
    triples: (doc.triples || []).map(cloneTriple),
    frules: (doc.frules || []).map(cloneRule),
    brules: (doc.brules || []).map(cloneRule),
    logQueryRules: (doc.logQueryRules || []).map(cloneRule),
    tokens: doc.tokens,
    text: doc.text,
    label: doc.label,
  };
}
|
|
110
|
+
|
|
111
|
+
/**
 * Fold the prefix bindings (and base IRI) of `source` into `target`.
 *
 * Prefix merging exists only to keep serialized output readable, so a later
 * source's empty default binding must never clobber an earlier source's
 * non-empty one.
 *
 * @param {object} target - Prefix environment mutated in place.
 * @param {object|null} source - Prefix environment to merge from; may be falsy.
 * @returns {object} The (possibly mutated) target environment.
 */
function mergePrefixEnvs(target, source) {
  if (!source) return target;

  const bindings = Object.entries(source.map || {});
  for (const [prefix, iri] of bindings) {
    const alreadyBound = Object.prototype.hasOwnProperty.call(target.map, prefix);
    // Every parser starts with an empty default namespace. Do not let a later
    // source that never declared ':' erase a useful default namespace from an
    // earlier source; prefix merging is for output readability only.
    if (iri || !alreadyBound) {
      target.set(prefix, iri);
    }
  }

  if (source.baseIri) {
    target.setBase(source.baseIri);
  }

  return target;
}
|
|
123
|
+
|
|
124
|
+
/**
 * Merge several parsed documents into a single combined document.
 *
 * Triples and rules are concatenated in source order; prefix environments
 * are folded together via mergePrefixEnvs. When more than one document is
 * merged (or when explicitly requested) blank-node labels are scoped per
 * source so labels from different documents never collide.
 *
 * @param {Array<object>} docs - Parsed documents (non-array input → empty merge).
 * @param {object} [opts]
 * @param {boolean} [opts.scopeBlankNodes] - Force blank-node scoping on/off;
 *   defaults to true only when there is more than one document.
 * @returns {object} Merged document; carries a hidden non-enumerable
 *   `sources` array of the per-source documents that were merged.
 */
function mergeParsedDocuments(docs, opts = {}) {
  const documents = Array.isArray(docs) ? docs : [];

  let scopeBlankNodes = documents.length > 1;
  if (typeof opts.scopeBlankNodes === 'boolean') {
    scopeBlankNodes = opts.scopeBlankNodes;
  }

  const merged = emptyParsedDocument();
  const mergedSources = [];

  documents.forEach((entry, index) => {
    const originalDoc = entry || emptyParsedDocument();
    const doc = scopeBlankNodes ? scopeBlankNodesInDocument(originalDoc, index + 1) : originalDoc;

    mergePrefixEnvs(merged.prefixes, doc.prefixes);
    for (const t of doc.triples || []) merged.triples.push(t);
    for (const r of doc.frules || []) merged.frules.push(r);
    for (const r of doc.brules || []) merged.brules.push(r);
    for (const r of doc.logQueryRules || []) merged.logQueryRules.push(r);

    mergedSources.push(doc);
  });

  // Keep the per-source breakdown available without it leaking into
  // enumeration or serialisation of the merged document.
  Object.defineProperty(merged, 'sources', {
    value: mergedSources,
    enumerable: false,
    writable: false,
    configurable: true,
  });

  return merged;
}
|
|
152
|
+
|
|
153
|
+
/**
 * Test whether `input` is a multi-source descriptor: a plain (non-array)
 * object carrying a `sources` array.
 *
 * @param {*} input - Candidate value of any type.
 * @returns {boolean}
 */
function isN3SourceListInput(input) {
  if (input === null || typeof input !== 'object') return false;
  if (Array.isArray(input)) return false;
  return Array.isArray(input.sources);
}
|
|
156
|
+
|
|
157
|
+
/**
 * Normalize one entry of a source list into `{ text, label, baseIri }`.
 *
 * Accepts either a bare N3 string or an object with an `n3` or `text`
 * string field (plus optional `label` and `baseIri` strings).
 *
 * @param {string|object} source - Source entry to normalize.
 * @param {number} index - 0-based position in the source list (used for the
 *   default "<source N>" label).
 * @returns {{text: string, label: string, baseIri: string}}
 * @throws {TypeError} When the entry is neither a string nor a usable object.
 */
function normalizeN3SourceItem(source, index) {
  const defaultLabel = `<source ${index + 1}>`;

  if (typeof source === 'string') {
    return { text: source, label: defaultLabel, baseIri: '' };
  }

  const isPlainObject = source !== null && typeof source === 'object' && !Array.isArray(source);
  if (!isPlainObject) {
    throw new TypeError('Each N3 source must be a string or an object with an n3/text field');
  }

  // `n3` wins over `text` when both are present.
  let text = null;
  if (typeof source.n3 === 'string') {
    text = source.n3;
  } else if (typeof source.text === 'string') {
    text = source.text;
  }
  if (text === null) throw new TypeError('Each N3 source object must provide an n3 or text string');

  const label = typeof source.label === 'string' && source.label ? source.label : defaultLabel;
  const baseIri = typeof source.baseIri === 'string' ? source.baseIri : '';

  return { text, label, baseIri };
}
|
|
175
|
+
|
|
176
|
+
/**
 * Parse a multi-source descriptor and return the merged parsed document.
 *
 * Returns null when `input` is not a source-list object (so callers can fall
 * back to single-document parsing). Each source is parsed independently and
 * the resulting ASTs are merged via mergeParsedDocuments.
 *
 * @param {*} input - Candidate `{ sources: [...], scopeBlankNodes? }` object.
 * @param {object} [opts]
 * @param {string} [opts.baseIri] - Default base IRI, honoured only when the
 *   list contains exactly one source.
 * @returns {object|null} Merged parsed document, or null for non-list input.
 */
function parseN3SourceList(input, opts = {}) {
  if (!isN3SourceListInput(input)) return null;

  const sources = input.sources.map(normalizeN3SourceItem);
  const defaultBaseIri = typeof opts.baseIri === 'string' ? opts.baseIri : '';
  const singleSource = sources.length === 1;

  const parsed = sources.map((source) => {
    // A caller-wide default base IRI only applies when there is exactly one
    // source; with several sources each entry must carry its own base.
    const baseIri = source.baseIri || (singleSource ? defaultBaseIri : '');
    return parseN3Text(source.text, { label: source.label, baseIri });
  });

  let scopeBlankNodes = parsed.length > 1;
  if (typeof input.scopeBlankNodes === 'boolean') {
    scopeBlankNodes = input.scopeBlankNodes;
  }

  return mergeParsedDocuments(parsed, { scopeBlankNodes });
}
|
|
190
|
+
|
|
191
|
+
// Public surface of the multi-source helpers. sourceBlankPrefix and
// scopedBlankLabel remain module-private implementation details.
module.exports = {
  emptyParsedDocument,
  parseN3Text,
  mergeParsedDocuments,
  scopeBlankNodesInDocument,
  isN3SourceListInput,
  parseN3SourceList,
};
|
package/package.json
CHANGED
package/test/api.test.js
CHANGED
|
@@ -2309,6 +2309,107 @@ _:b a ex:Person ; ex:name "B" .
|
|
|
2309
2309
|
`,
|
|
2310
2310
|
expect: [/^:test\s+:is\s+true\s*\./m],
|
|
2311
2311
|
},
|
|
2312
|
+
{
|
|
2313
|
+
name: '69 CLI multi-input: parses files separately and reasons over merged AST',
|
|
2314
|
+
run() {
|
|
2315
|
+
const os = require('node:os');
|
|
2316
|
+
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), 'eyeling-multi-input-'));
|
|
2317
|
+
const factsPath = path.join(tmp, 'facts.n3');
|
|
2318
|
+
const rulesPath = path.join(tmp, 'rules.n3');
|
|
2319
|
+
|
|
2320
|
+
fs.writeFileSync(factsPath, '@prefix : <http://example.org/> .\n:Socrates a :Man .\n', 'utf8');
|
|
2321
|
+
fs.writeFileSync(rulesPath, '@prefix : <http://example.org/> .\n{ ?x a :Man } => { ?x a :Mortal } .\n', 'utf8');
|
|
2322
|
+
|
|
2323
|
+
try {
|
|
2324
|
+
const r = spawnSync(process.execPath, [path.join(ROOT, 'eyeling.js'), factsPath, rulesPath], {
|
|
2325
|
+
encoding: 'utf8',
|
|
2326
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
2327
|
+
maxBuffer: DEFAULT_MAX_BUFFER,
|
|
2328
|
+
});
|
|
2329
|
+
if (r.error) throw r.error;
|
|
2330
|
+
if (r.status !== 0) {
|
|
2331
|
+
const err = new Error(`CLI failed with exit ${r.status}`);
|
|
2332
|
+
err.code = r.status;
|
|
2333
|
+
err.stdout = r.stdout;
|
|
2334
|
+
err.stderr = r.stderr;
|
|
2335
|
+
throw err;
|
|
2336
|
+
}
|
|
2337
|
+
return r.stdout;
|
|
2338
|
+
} finally {
|
|
2339
|
+
fs.rmSync(tmp, { recursive: true, force: true });
|
|
2340
|
+
}
|
|
2341
|
+
},
|
|
2342
|
+
expect: [/:(?:Socrates)\s+a\s+:(?:Mortal)\s*\./],
|
|
2343
|
+
},
|
|
2344
|
+
{
|
|
2345
|
+
name: '70 CLI multi-input: scopes blank node labels per source',
|
|
2346
|
+
run() {
|
|
2347
|
+
const os = require('node:os');
|
|
2348
|
+
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), 'eyeling-multi-blank-'));
|
|
2349
|
+
const leftPath = path.join(tmp, 'left.n3');
|
|
2350
|
+
const rightPath = path.join(tmp, 'right.n3');
|
|
2351
|
+
const rulePath = path.join(tmp, 'rule.n3');
|
|
2352
|
+
|
|
2353
|
+
fs.writeFileSync(leftPath, '@prefix : <http://example.org/> .\n_:x :p :a .\n', 'utf8');
|
|
2354
|
+
fs.writeFileSync(rightPath, '@prefix : <http://example.org/> .\n_:x :q :b .\n', 'utf8');
|
|
2355
|
+
fs.writeFileSync(
|
|
2356
|
+
rulePath,
|
|
2357
|
+
'@prefix : <http://example.org/> .\n{ ?x :p :a . ?x :q :b . } => { :bad :merged true } .\n',
|
|
2358
|
+
'utf8',
|
|
2359
|
+
);
|
|
2360
|
+
|
|
2361
|
+
try {
|
|
2362
|
+
const r = spawnSync(process.execPath, [path.join(ROOT, 'eyeling.js'), leftPath, rightPath, rulePath], {
|
|
2363
|
+
encoding: 'utf8',
|
|
2364
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
2365
|
+
maxBuffer: DEFAULT_MAX_BUFFER,
|
|
2366
|
+
});
|
|
2367
|
+
if (r.error) throw r.error;
|
|
2368
|
+
if (r.status !== 0) {
|
|
2369
|
+
const err = new Error(`CLI failed with exit ${r.status}`);
|
|
2370
|
+
err.code = r.status;
|
|
2371
|
+
err.stdout = r.stdout;
|
|
2372
|
+
err.stderr = r.stderr;
|
|
2373
|
+
throw err;
|
|
2374
|
+
}
|
|
2375
|
+
return r.stdout;
|
|
2376
|
+
} finally {
|
|
2377
|
+
fs.rmSync(tmp, { recursive: true, force: true });
|
|
2378
|
+
}
|
|
2379
|
+
},
|
|
2380
|
+
notExpect: [/^:bad\s+:merged\s+true\s*\./m],
|
|
2381
|
+
},
|
|
2382
|
+
{
|
|
2383
|
+
name: '71 API multi-source: reason() accepts source list input',
|
|
2384
|
+
run() {
|
|
2385
|
+
return reason(
|
|
2386
|
+
{ proofComments: false },
|
|
2387
|
+
{
|
|
2388
|
+
sources: [
|
|
2389
|
+
'@prefix : <http://example.org/> .\n:Socrates a :Man .\n',
|
|
2390
|
+
'@prefix : <http://example.org/> .\n{ ?x a :Man } => { ?x a :Mortal } .\n',
|
|
2391
|
+
],
|
|
2392
|
+
},
|
|
2393
|
+
);
|
|
2394
|
+
},
|
|
2395
|
+
expect: [/:(?:Socrates)\s+a\s+:(?:Mortal)\s*\./],
|
|
2396
|
+
},
|
|
2397
|
+
{
|
|
2398
|
+
name: '72 API multi-source: reasonStream() accepts source list input',
|
|
2399
|
+
run() {
|
|
2400
|
+
const result = reasonStream(
|
|
2401
|
+
{
|
|
2402
|
+
sources: [
|
|
2403
|
+
'@prefix : <http://example.org/> .\n:Socrates a :Man .\n',
|
|
2404
|
+
'@prefix : <http://example.org/> .\n{ ?x a :Man } => { ?x a :Mortal } .\n',
|
|
2405
|
+
],
|
|
2406
|
+
},
|
|
2407
|
+
{ proof: false },
|
|
2408
|
+
);
|
|
2409
|
+
return result.closureN3;
|
|
2410
|
+
},
|
|
2411
|
+
expect: [/:(?:Socrates)\s+a\s+:(?:Mortal)\s*\./],
|
|
2412
|
+
},
|
|
2312
2413
|
{
|
|
2313
2414
|
name: 'regression: log:semantics body alpha-renaming does not refire blank-head rule forever',
|
|
2314
2415
|
async run() {
|