eyeling 1.7.0 → 1.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -99,13 +99,15 @@ npm run test:packlist
  ### Usage
 
  ```
- Usage: eyeling.js [options] <file.n3>
+ Usage: eyeling [options] <file.n3>
 
  Options:
  -h, --help Show this help and exit.
  -v, --version Print version and exit.
  -p, --proof-comments Enable proof explanations.
  -n, --no-proof-comments Disable proof explanations (default).
+ -s, --super-restricted Disable all builtins except => and <=.
+ -a, --ast Print parsed AST as JSON and exit.
  ```
 
  By default, `eyeling`:
@@ -1,7 +1,7 @@
  # alignment-demo.n3
  # Minimal alignment example (SKOS mappings + inferred roll-up to a reference concept)
 
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
  @prefix ref: <http://example.org/taxonomy/ref/> .
  @prefix tel: <http://example.org/taxonomy/telraam/> .
  @prefix anpr: <http://example.org/taxonomy/anpr/> .
@@ -0,0 +1,7 @@
+ @prefix : <http://example/#> .
+ @prefix log: <http://www.w3.org/2000/10/swap/log#> .
+ @prefix string: <http://www.w3.org/2000/10/swap/string#> .
+
+ :phayes a :Person ; :givenName "Pat"; :familyName "Hayes" .
+
+ { ?x a :Person . ?SCOPE log:notIncludes { ?x :name ?someName . } . ?x :givenName ?name1 . ?x :familyName ?name2 . (?name1 " " ?name2) string:concatenation ?FN . } => { ?x :name ?FN } .
@@ -0,0 +1,12 @@
+ PREFIX : <http://example/#>
+
+ DATA { :phayes a :Person ; :givenName "Pat"; :familyName "Hayes" . }
+
+ # Default value - calculate a name
+ RULE { ?x :name ?FN } WHERE {
+ ?x a :Person
+ NOT { ?x :name ?someName }
+ ?x :givenName ?name1 ;
+ :familyName ?name2 .
+ BIND(concat(?name1, " ", ?name2) AS ?FN)
+ }
@@ -0,0 +1,10 @@
+ @prefix : <http://example.org/#> .
+
+ :A :fatherOf :X . :B :motherOf :X . :C :motherOf :A .
+
+ { ?y :fatherOf ?x . } => { ?x :childOf ?y } .
+ { ?y :motherOf ?x . } => { ?x :childOf ?y } .
+ { ?x :childOf ?y . } => { ?x :descendedFrom ?y } .
+ { ?x :childOf ?z . ?z :childOf ?y . } => { ?x :descendedFrom ?y } .
+ { ?y :descendedFrom ?x . } => { ?x :ancestorOf ?y } .
+ { ?a :ancestorOf ?c . ?c :ancestorOf ?b . } => { ?a :ancestorOf ?b } .
@@ -0,0 +1,12 @@
+ PREFIX : <http://example.org/#>
+
+ DATA { :A :fatherOf :X . :B :motherOf :X . :C :motherOf :A . }
+
+ RULE { ?x :childOf ?y } WHERE { ?y :fatherOf ?x }
+ RULE { ?x :childOf ?y } WHERE { ?y :motherOf ?x }
+
+ RULE { ?x :descendedFrom ?y } WHERE { ?x :childOf ?y }
+ RULE { ?x :descendedFrom ?y } WHERE { ?x :childOf ?z . ?z :childOf ?y }
+
+ RULE { ?x :ancestorOf ?y } WHERE { ?y :descendedFrom ?x }
+ RULE { ?a :ancestorOf ?b } WHERE { ?a :ancestorOf ?c . ?c :ancestorOf ?b }
@@ -0,0 +1,8 @@
+ @prefix : <http://example/#> .
+ @prefix math: <http://www.w3.org/2000/10/swap/math#> .
+
+ :x :p 1 ; :q 2 .
+
+ { ?x :p ?v1 . ?x :q ?v2 . ?v1 math:greaterThan 0 . ?v2 math:greaterThan 0 . } => { ?x :bothPositive true . } .
+ { ?x :p ?v1 . ?x :q ?v2 . ?v1 math:equalTo 0 . } => { ?x :oneIsZero true . } .
+ { ?x :p ?v1 . ?x :q ?v2 . ?v2 math:equalTo 0 . } => { ?x :oneIsZero true . } .
@@ -0,0 +1,9 @@
+ PREFIX : <http://example/#>
+
+ DATA { :x :p 1 ; :q 2 . }
+
+ RULE { ?x :bothPositive true . }
+ WHERE { ?x :p ?v1 FILTER ( ?v1 > 0 ) ?x :q ?v2 FILTER ( ?v2 > 0 ) }
+
+ RULE { ?x :oneIsZero true . }
+ WHERE { ?x :p ?v1 ; :q ?v2 FILTER ( ( ?v1 = 0 ) || ( ?v2 = 0 ) ) }
@@ -7,7 +7,7 @@
  @prefix string: <http://www.w3.org/2000/10/swap/string#> .
  @prefix list: <http://www.w3.org/2000/10/swap/list#> .
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
 
  ex:doc ex:json """{
  "users": [
@@ -9,7 +9,7 @@
  @prefix math: <http://www.w3.org/2000/10/swap/math#> .
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
  @prefix owl: <http://www.w3.org/2002/07/owl#> .
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
 
  # ------------------------------------------------------------------
  # Master data (canonical N3 graph)
@@ -2,7 +2,7 @@
  # log:collectAllIn example
  # ========================
 
- @prefix : <http://example.org/>.
+ @prefix : <http://example.org/#>.
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
  @prefix string: <http://www.w3.org/2000/10/swap/string#> .
 
@@ -2,7 +2,7 @@
  # log:forAllIn example
  # ====================
 
- @prefix : <http://example.org/>.
+ @prefix : <http://example.org/#>.
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
 
  :c a :CompositeTask ;
@@ -2,7 +2,7 @@
  # log:notIncludes example
  # =======================
 
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
  @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
  @prefix math: <http://www.w3.org/2000/10/swap/math#> .
@@ -2,7 +2,7 @@
  # log:skolem example
  # ==================
 
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
 
  {
@@ -2,7 +2,7 @@
  # log:uri example
  # ===============
 
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
 
  {
@@ -14,7 +14,7 @@
  # eyeling minimal-skos-alignment.n3
  # ------------------------------------------------------------
 
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
  @prefix ref: <http://example.org/ref/> .
  @prefix anpr: <http://example.org/anpr/> .
  @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@@ -1,5 +1,5 @@
  @prefix anpr: <http://example.org/taxonomy/anpr/> .
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
  @prefix ref: <http://example.org/taxonomy/ref/> .
  @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
  @prefix tel: <http://example.org/taxonomy/telraam/> .
@@ -0,0 +1,3 @@
+ @prefix : <http://example/#> .
+
+ :phayes :name "Pat Hayes" .
@@ -0,0 +1,13 @@
+ @prefix : <http://example.org/#> .
+
+ :X :childOf :A .
+ :X :childOf :B .
+ :A :childOf :C .
+ :X :descendedFrom :A .
+ :X :descendedFrom :B .
+ :A :descendedFrom :C .
+ :X :descendedFrom :C .
+ :A :ancestorOf :X .
+ :B :ancestorOf :X .
+ :C :ancestorOf :A .
+ :C :ancestorOf :X .
@@ -0,0 +1,3 @@
+ @prefix : <http://example/#> .
+
+ :x :bothPositive true .
@@ -1,4 +1,4 @@
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
 
  ex:checks ex:firstUserNameOk true .
  <urn:example:user:u1> a ex:AllowedUser .
@@ -1,4 +1,4 @@
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
  @prefix genid: <https://eyereasoner.github.io/.well-known/genid/> .
  @prefix owl: <http://www.w3.org/2002/07/owl#> .
 
@@ -1,4 +1,4 @@
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
 
  :result1 :is ("Huey" "Dewey" "Louie") .
  :result2 :is (("Huey") ("Dewey") ("Louie")) .
@@ -1,3 +1,3 @@
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
 
  :result :is true .
@@ -1,4 +1,4 @@
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
  @prefix log: <http://www.w3.org/2000/10/swap/log#> .
 
  :x :y 1 .
@@ -1,4 +1,4 @@
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
  @prefix genid: <https://eyereasoner.github.io/.well-known/genid/> .
 
- :Result :skolem genid:0c562a90-a44f-264a-86e5-9664948aa178 .
+ :Result :skolem genid:32007e38-bd28-5f18-4712-ae1df708a5d4 .
@@ -1,4 +1,4 @@
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
 
  :uriString :is "https://www.w3.org" .
  :uriResource :is <https://www.w3.org> .
@@ -1,5 +1,5 @@
  @prefix anpr: <http://example.org/anpr/> .
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
  @prefix ref: <http://example.org/ref/> .
 
  anpr:VehicleWithPlate ex:treatedAs ref:Car .
@@ -1,3 +1,3 @@
- @prefix : <http://example.org/> .
+ @prefix : <http://example.org/#> .
 
  :Alice :hates :Nobody .
@@ -1,5 +1,5 @@
  @prefix anpr: <http://example.org/taxonomy/anpr/> .
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
  @prefix genid: <https://eyereasoner.github.io/.well-known/genid/> .
  @prefix prov: <http://www.w3.org/ns/prov#> .
  @prefix ref: <http://example.org/taxonomy/ref/> .
@@ -130,18 +130,18 @@ anpr:PassengerCar ex:narrowerOrEqualOf ref:RoadUser .
  anpr:Van ex:narrowerOrEqualOf ref:RoadUser .
  anpr:Truck ex:narrowerOrEqualOf ref:RoadUser .
  anpr:Bus ex:narrowerOrEqualOf ref:RoadUser .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc a sosa:Observation .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc a ex:AggregatedObservation .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc sosa:observedProperty ref:Car .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc sosa:hasFeatureOfInterest ex:segment1 .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc sosa:resultTime "2025-12-23T08:00:00Z"^^xsd:dateTime .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc sosa:hasSimpleResult 22 .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc ex:basedOnRequest ex:request_cars_at_t1 .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc ex:contributingValues (10 2 8 1 1 0) .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc ex:contributingObservations (ex:obs_tel_car ex:obs_tel_heavy ex:obs_anpr_passenger ex:obs_anpr_van ex:obs_anpr_truck ex:obs_anpr_bus) .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc prov:wasDerivedFrom ex:obs_tel_car .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc prov:wasDerivedFrom ex:obs_tel_heavy .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc prov:wasDerivedFrom ex:obs_anpr_passenger .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc prov:wasDerivedFrom ex:obs_anpr_van .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc prov:wasDerivedFrom ex:obs_anpr_truck .
- genid:5d9446e0-5075-531c-f72b-7fb8f210a4dc prov:wasDerivedFrom ex:obs_anpr_bus .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 a sosa:Observation .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 a ex:AggregatedObservation .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 sosa:observedProperty ref:Car .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 sosa:hasFeatureOfInterest ex:segment1 .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 sosa:resultTime "2025-12-23T08:00:00Z"^^xsd:dateTime .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 sosa:hasSimpleResult 22 .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 ex:basedOnRequest ex:request_cars_at_t1 .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 ex:contributingValues (10 2 8 1 1 0) .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 ex:contributingObservations (ex:obs_tel_car ex:obs_tel_heavy ex:obs_anpr_passenger ex:obs_anpr_van ex:obs_anpr_truck ex:obs_anpr_bus) .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 prov:wasDerivedFrom ex:obs_tel_car .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 prov:wasDerivedFrom ex:obs_tel_heavy .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 prov:wasDerivedFrom ex:obs_anpr_passenger .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 prov:wasDerivedFrom ex:obs_anpr_van .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 prov:wasDerivedFrom ex:obs_anpr_truck .
+ genid:baddfb7c-3e49-3ca8-bc4f-6de82f22dc60 prov:wasDerivedFrom ex:obs_anpr_bus .
package/examples/snaf.n3 CHANGED
@@ -3,7 +3,7 @@
  # ==========================
 
  @prefix log: <http://www.w3.org/2000/10/swap/log#>.
- @prefix : <http://example.org/>.
+ @prefix : <http://example.org/#>.
 
  :Alice :loves :Bob.
  :Bob a :Person.
package/examples/snaf.srl CHANGED
@@ -1,7 +1,6 @@
  PREFIX log: <http://www.w3.org/2000/10/swap/log#>
- PREFIX : <http://example.org/>
+ PREFIX : <http://example.org/#>
 
- :Alice :loves :Bob.
- :Bob a :Person.
+ DATA { :Alice :loves :Bob. :Bob a :Person. }
 
  RULE { :Alice :hates :Nobody. } WHERE { NOT { :Alice :hates ?X } ?X a :Person. }
@@ -10,7 +10,7 @@
  # or:
  # node eyeling.js traffic-skos-aggregate.n3
 
- @prefix ex: <http://example.org/> .
+ @prefix ex: <http://example.org/#> .
  @prefix ref: <http://example.org/taxonomy/ref/> .
  @prefix tel: <http://example.org/taxonomy/telraam/> .
  @prefix anpr: <http://example.org/taxonomy/anpr/> .
package/eyeling.js CHANGED
@@ -315,13 +315,24 @@ class DerivedFact {
  // ===========================================================================
 
  class Token {
- constructor(typ, value = null) {
+ constructor(typ, value = null, offset = null) {
  this.typ = typ;
  this.value = value;
+ // Codepoint offset in the original source (Array.from(text) index).
+ this.offset = offset;
  }
  toString() {
- if (this.value == null) return `Token(${this.typ})`;
- return `Token(${this.typ}, ${JSON.stringify(this.value)})`;
+ const loc = typeof this.offset === 'number' ? `@${this.offset}` : '';
+ if (this.value == null) return `Token(${this.typ}${loc})`;
+ return `Token(${this.typ}${loc}, ${JSON.stringify(this.value)})`;
+ }
+ }
+
+ class N3SyntaxError extends SyntaxError {
+ constructor(message, offset = null) {
+ super(message);
+ this.name = 'N3SyntaxError';
+ this.offset = offset;
  }
  }
 
@@ -434,12 +445,12 @@ function lex(inputText) {
  // 3) Two-character operators: => and <=
  if (c === '=') {
  if (peek(1) === '>') {
- tokens.push(new Token('OpImplies'));
+ tokens.push(new Token('OpImplies', null, i));
  i += 2;
  continue;
  } else {
  // N3 syntactic sugar: '=' means owl:sameAs
- tokens.push(new Token('Equals'));
+ tokens.push(new Token('Equals', null, i));
  i += 1;
  continue;
  }
@@ -447,17 +458,18 @@ function lex(inputText) {
 
  if (c === '<') {
  if (peek(1) === '=') {
- tokens.push(new Token('OpImpliedBy'));
+ tokens.push(new Token('OpImpliedBy', null, i));
  i += 2;
  continue;
  }
  // N3 predicate inversion: "<-" (swap subject/object for this predicate)
  if (peek(1) === '-') {
- tokens.push(new Token('OpPredInvert'));
+ tokens.push(new Token('OpPredInvert', null, i));
  i += 2;
  continue;
  }
  // Otherwise IRIREF <...>
+ const start = i;
  i++; // skip '<'
  const iriChars = [];
  while (i < n && chars[i] !== '>') {
@@ -465,27 +477,27 @@ function lex(inputText) {
  i++;
  }
  if (i >= n || chars[i] !== '>') {
- throw new Error('Unterminated IRI <...>');
+ throw new N3SyntaxError('Unterminated IRI <...>', start);
  }
  i++; // skip '>'
  const iri = iriChars.join('');
- tokens.push(new Token('IriRef', iri));
+ tokens.push(new Token('IriRef', iri, start));
  continue;
  }
 
  // 4) Path + datatype operators: !, ^, ^^
  if (c === '!') {
- tokens.push(new Token('OpPathFwd'));
+ tokens.push(new Token('OpPathFwd', null, i));
  i += 1;
  continue;
  }
  if (c === '^') {
  if (peek(1) === '^') {
- tokens.push(new Token('HatHat'));
+ tokens.push(new Token('HatHat', null, i));
  i += 2;
  continue;
  }
- tokens.push(new Token('OpPathRev'));
+ tokens.push(new Token('OpPathRev', null, i));
  i += 1;
  continue;
  }
@@ -503,13 +515,15 @@ function lex(inputText) {
  ',': 'Comma',
  '.': 'Dot',
  };
- tokens.push(new Token(mapping[c]));
+ tokens.push(new Token(mapping[c], null, i));
  i++;
  continue;
  }
 
  // String literal: short "..." or long """..."""
  if (c === '"') {
+ const start = i;
+
  // Long string literal """ ... """
  if (peek(1) === '"' && peek(2) === '"') {
  i += 3; // consume opening """
@@ -553,11 +567,11 @@ function lex(inputText) {
  sChars.push(cc);
  i++;
  }
- if (!closed) throw new Error('Unterminated long string literal """..."""');
+ if (!closed) throw new N3SyntaxError('Unterminated long string literal """..."""', start);
  const raw = '"""' + sChars.join('') + '"""';
  const decoded = decodeN3StringEscapes(stripQuotes(raw));
  const s = JSON.stringify(decoded); // canonical short quoted form
- tokens.push(new Token('Literal', s));
+ tokens.push(new Token('Literal', s, start));
  continue;
  }
 
@@ -582,12 +596,14 @@ function lex(inputText) {
  const raw = '"' + sChars.join('') + '"';
  const decoded = decodeN3StringEscapes(stripQuotes(raw));
  const s = JSON.stringify(decoded); // canonical short quoted form
- tokens.push(new Token('Literal', s));
+ tokens.push(new Token('Literal', s, start));
  continue;
  }
 
  // String literal: short '...' or long '''...'''
  if (c === "'") {
+ const start = i;
+
  // Long string literal ''' ... '''
  if (peek(1) === "'" && peek(2) === "'") {
  i += 3; // consume opening '''
@@ -631,11 +647,11 @@ function lex(inputText) {
  sChars.push(cc);
  i++;
  }
- if (!closed) throw new Error("Unterminated long string literal '''...'''");
+ if (!closed) throw new N3SyntaxError("Unterminated long string literal '''...'''", start);
  const raw = "'''" + sChars.join('') + "'''";
  const decoded = decodeN3StringEscapes(stripQuotes(raw));
  const s = JSON.stringify(decoded); // canonical short quoted form
- tokens.push(new Token('Literal', s));
+ tokens.push(new Token('Literal', s, start));
  continue;
  }
 
@@ -660,12 +676,13 @@ function lex(inputText) {
  const raw = "'" + sChars.join('') + "'";
  const decoded = decodeN3StringEscapes(stripQuotes(raw));
  const s = JSON.stringify(decoded); // canonical short quoted form
- tokens.push(new Token('Literal', s));
+ tokens.push(new Token('Literal', s, start));
  continue;
  }
 
  // Variable ?name
  if (c === '?') {
+ const start = i;
  i++;
  const nameChars = [];
  let cc;
@@ -674,12 +691,13 @@ function lex(inputText) {
  i++;
  }
  const name = nameChars.join('');
- tokens.push(new Token('Var', name));
+ tokens.push(new Token('Var', name, start));
  continue;
  }
 
  // Directives: @prefix, @base (and language tags after string literals)
  if (c === '@') {
+ const start = i;
  const prevTok = tokens.length ? tokens[tokens.length - 1] : null;
  const prevWasQuotedLiteral =
  prevTok && prevTok.typ === 'Literal' && typeof prevTok.value === 'string' && prevTok.value.startsWith('"');
@@ -692,7 +710,7 @@ function lex(inputText) {
  const tagChars = [];
  let cc = peek();
  if (cc === null || !/[A-Za-z]/.test(cc)) {
- throw new Error("Invalid language tag (expected [A-Za-z] after '@')");
+ throw new N3SyntaxError("Invalid language tag (expected [A-Za-z] after '@')", start);
  }
  while ((cc = peek()) !== null && /[A-Za-z]/.test(cc)) {
  tagChars.push(cc);
@@ -707,11 +725,11 @@ function lex(inputText) {
  i++;
  }
  if (!segChars.length) {
- throw new Error("Invalid language tag (expected [A-Za-z0-9]+ after '-')");
+ throw new N3SyntaxError("Invalid language tag (expected [A-Za-z0-9]+ after '-')", start);
  }
  tagChars.push(...segChars);
  }
- tokens.push(new Token('LangTag', tagChars.join('')));
+ tokens.push(new Token('LangTag', tagChars.join(''), start));
  continue;
  }
 
@@ -723,14 +741,15 @@ function lex(inputText) {
  i++;
  }
  const word = wordChars.join('');
- if (word === 'prefix') tokens.push(new Token('AtPrefix'));
- else if (word === 'base') tokens.push(new Token('AtBase'));
- else throw new Error(`Unknown directive @${word}`);
+ if (word === 'prefix') tokens.push(new Token('AtPrefix', null, start));
+ else if (word === 'base') tokens.push(new Token('AtBase', null, start));
+ else throw new N3SyntaxError(`Unknown directive @${word}`, start);
  continue;
  }
 
  // 6) Numeric literal (integer or float)
  if (/[0-9]/.test(c) || (c === '-' && peek(1) !== null && /[0-9]/.test(peek(1)))) {
+ const start = i;
  const numChars = [c];
  i++;
  while (i < n) {
@@ -770,11 +789,12 @@ function lex(inputText) {
  }
  }
 
- tokens.push(new Token('Literal', numChars.join('')));
+ tokens.push(new Token('Literal', numChars.join(''), start));
  continue;
  }
 
  // 7) Identifiers / keywords / QNames
+ const start = i;
  const wordChars = [];
  let cc;
  while ((cc = peek()) !== null && isNameChar(cc)) {
@@ -782,19 +802,19 @@ function lex(inputText) {
  i++;
  }
  if (!wordChars.length) {
- throw new Error(`Unexpected char: ${JSON.stringify(c)}`);
+ throw new N3SyntaxError(`Unexpected char: ${JSON.stringify(c)}`, i);
  }
  const word = wordChars.join('');
  if (word === 'true' || word === 'false') {
- tokens.push(new Token('Literal', word));
+ tokens.push(new Token('Literal', word, start));
  } else if ([...word].every((ch) => /[0-9.\-]/.test(ch))) {
- tokens.push(new Token('Literal', word));
+ tokens.push(new Token('Literal', word, start));
  } else {
- tokens.push(new Token('Ident', word));
+ tokens.push(new Token('Ident', word, start));
  }
  }
 
- tokens.push(new Token('EOF'));
+ tokens.push(new Token('EOF', null, n));
  return tokens;
  }
 
@@ -984,10 +1004,15 @@ class Parser {
  return tok;
  }
 
+ fail(message, tok = this.peek()) {
+ const off = tok && typeof tok.offset === 'number' ? tok.offset : null;
+ throw new N3SyntaxError(message, off);
+ }
+
  expectDot() {
  const tok = this.next();
  if (tok.typ !== 'Dot') {
- throw new Error(`Expected '.', got ${tok.toString()}`);
+ this.fail(`Expected '.', got ${tok.toString()}`, tok);
  }
  }
 
@@ -1079,7 +1104,7 @@ class Parser {
  parsePrefixDirective() {
  const tok = this.next();
  if (tok.typ !== 'Ident') {
- throw new Error(`Expected prefix name, got ${tok.toString()}`);
+ this.fail(`Expected prefix name, got ${tok.toString()}`, tok);
  }
  const pref = tok.value || '';
  const prefName = pref.endsWith(':') ? pref.slice(0, -1) : pref;
@@ -1099,7 +1124,7 @@ class Parser {
  } else if (tok2.typ === 'Ident') {
  iri = this.prefixes.expandQName(tok2.value || '');
  } else {
- throw new Error(`Expected IRI after @prefix, got ${tok2.toString()}`);
+ this.fail(`Expected IRI after @prefix, got ${tok2.toString()}`, tok2);
  }
  this.expectDot();
  this.prefixes.set(prefName, iri);
@@ -1113,7 +1138,7 @@ class Parser {
  } else if (tok.typ === 'Ident') {
  iri = tok.value || '';
  } else {
- throw new Error(`Expected IRI after @base, got ${tok.toString()}`);
+ this.fail(`Expected IRI after @base, got ${tok.toString()}`, tok);
  }
  this.expectDot();
  this.prefixes.setBase(iri);
@@ -1123,7 +1148,7 @@ class Parser {
  // SPARQL/Turtle-style PREFIX directive: PREFIX pfx: <iri> (no trailing '.')
  const tok = this.next();
  if (tok.typ !== 'Ident') {
- throw new Error(`Expected prefix name after PREFIX, got ${tok.toString()}`);
+ this.fail(`Expected prefix name after PREFIX, got ${tok.toString()}`, tok);
  }
  const pref = tok.value || '';
  const prefName = pref.endsWith(':') ? pref.slice(0, -1) : pref;
@@ -1135,7 +1160,7 @@ class Parser {
  } else if (tok2.typ === 'Ident') {
  iri = this.prefixes.expandQName(tok2.value || '');
  } else {
- throw new Error(`Expected IRI after PREFIX, got ${tok2.toString()}`);
+ this.fail(`Expected IRI after PREFIX, got ${tok2.toString()}`, tok2);
  }
 
  // N3/Turtle: PREFIX directives do not have a trailing '.', but accept it permissively.
@@ -1153,7 +1178,7 @@ class Parser {
  } else if (tok.typ === 'Ident') {
  iri = tok.value || '';
  } else {
- throw new Error(`Expected IRI after BASE, got ${tok.toString()}`);
+ this.fail(`Expected IRI after BASE, got ${tok.toString()}`, tok);
  }
 
  // N3/Turtle: BASE directives do not have a trailing '.', but accept it permissively.
@@ -1213,7 +1238,7 @@ class Parser {
  if (this.peek().typ === 'LangTag') {
  // Only quoted string literals can carry a language tag.
  if (!(s.startsWith('"') && s.endsWith('"'))) {
- throw new Error('Language tag is only allowed on quoted string literals');
+ this.fail('Language tag is only allowed on quoted string literals', this.peek());
  }
  const langTok = this.next();
  const lang = langTok.value || '';
@@ -1221,7 +1246,7 @@ class Parser {
 
  // N3/Turtle: language tags and datatypes are mutually exclusive.
  if (this.peek().typ === 'HatHat') {
- throw new Error('A literal cannot have both a language tag (@...) and a datatype (^^...)');
+ this.fail('A literal cannot have both a language tag (@...) and a datatype (^^...)', this.peek());
  }
  }
 
@@ -1236,7 +1261,7 @@ class Parser {
  if (qn.includes(':')) dtIri = this.prefixes.expandQName(qn);
  else dtIri = qn;
  } else {
- throw new Error(`Expected datatype after ^^, got ${dtTok.toString()}`);
+ this.fail(`Expected datatype after ^^, got ${dtTok.toString()}`, dtTok);
  }
  s = `${s}^^<${dtIri}>`;
  }
@@ -1248,7 +1273,7 @@ class Parser {
  if (typ === 'LBracket') return this.parseBlank();
  if (typ === 'LBrace') return this.parseGraph();
 
- throw new Error(`Unexpected term token: ${tok.toString()}`);
+ this.fail(`Unexpected term token: ${tok.toString()}`, tok);
  }
 
  parseList() {
@@ -1271,12 +1296,12 @@ class Parser {
  // IRI property list: [ id <IRI> predicateObjectList? ]
  // Lets you embed descriptions of an IRI directly in object position.
  if (this.peek().typ === 'Ident' && (this.peek().value || '') === 'id') {
- this.next(); // consume 'id'
+ const iriTok = this.next(); // consume 'id'
  const iriTerm = this.parseTerm();
 
  // N3 note: 'id' form is not meant to be used with blank node identifiers.
  if (iriTerm instanceof Blank && iriTerm.label.startsWith('_:')) {
- throw new Error("Cannot use 'id' keyword with a blank node identifier inside [...]");
+ this.fail("Cannot use 'id' keyword with a blank node identifier inside [...]", iriTok);
  }
 
  // Optional ';' right after the id IRI (tolerated).
@@ -1322,7 +1347,7 @@ class Parser {
  }
 
  if (this.peek().typ !== 'RBracket') {
- throw new Error(`Expected ']' at end of IRI property list, got ${JSON.stringify(this.peek())}`);
+ this.fail(`Expected ']' at end of IRI property list, got ${this.peek().toString()}`);
  }
  this.next();
  return iriTerm;
@@ -1370,7 +1395,7 @@ class Parser {
  if (this.peek().typ === 'RBracket') {
  this.next();
  } else {
- throw new Error(`Expected ']' at end of blank node property list, got ${JSON.stringify(this.peek())}`);
+ this.fail(`Expected ']' at end of blank node property list, got ${this.peek().toString()}`);
  }
 
  return new Blank(id);
@@ -1389,7 +1414,7 @@ class Parser {
  else if (this.peek().typ === 'RBrace') {
  // ok
  } else {
- throw new Error(`Expected '.' or '}', got ${this.peek().toString()}`);
+ this.fail(`Expected '.' or '}', got ${this.peek().toString()}`);
  }
  } else if (this.peek().typ === 'OpImpliedBy') {
  this.next();
@@ -1400,7 +1425,7 @@ class Parser {
  else if (this.peek().typ === 'RBrace') {
  // ok
  } else {
- throw new Error(`Expected '.' or '}', got ${this.peek().toString()}`);
+ this.fail(`Expected '.' or '}', got ${this.peek().toString()}`);
  }
  } else {
  // N3 grammar allows: triples ::= subject predicateObjectList?
@@ -1419,7 +1444,7 @@ class Parser {
  else if (this.peek().typ === 'RBrace') {
  // ok
  } else {
- throw new Error(`Expected '.' or '}', got ${this.peek().toString()}`);
+ this.fail(`Expected '.' or '}', got ${this.peek().toString()}`);
  }
  }
  }
@@ -1452,7 +1477,7 @@ class Parser {
  this.next(); // consume "is"
  verb = this.parseTerm();
  if (!(this.peek().typ === 'Ident' && (this.peek().value || '') === 'of')) {
- throw new Error(`Expected 'of' after 'is <expr>', got ${this.peek().toString()}`);
+ this.fail(`Expected 'of' after 'is <expr>', got ${this.peek().toString()}`);
  }
  this.next(); // consume "of"
  invert = true;
@@ -4613,6 +4638,58 @@ function evalBuiltin(goal, subst, facts, backRules, depth, varGen) {
  return [{ ...subst }];
  }
 
+ // log:conjunction
+ // Schema: ( $s.i+ )+ log:conjunction $o?
+ // $o is a formula containing a copy of each formula in the subject list.
+ // Duplicates are removed.
+ if (pv === LOG_NS + 'conjunction') {
+ if (!(g.s instanceof ListTerm)) return [];
+
+ const parts = g.s.elems;
+ if (!parts.length) return [];
+
+ /** @type {Triple[]} */
+ const merged = [];
+
+ // Fast-path dedup for IRI/Literal-only triples.
+ const fastKeySet = new Set();
+
+ for (const part of parts) {
+ // Support the empty formula token `true`.
+ if (part instanceof Literal && part.value === 'true') continue;
+
+ if (!(part instanceof GraphTerm)) return [];
+
+ for (const tr of part.triples) {
+ const k = tripleFastKey(tr);
+ if (k !== null) {
+ if (fastKeySet.has(k)) continue;
+ fastKeySet.add(k);
+ merged.push(tr);
+ continue;
+ }
+
+ // Fallback: structural equality (still respects plain-string == xsd:string).
+ let dup = false;
+ for (const ex of merged) {
+ if (triplesEqual(tr, ex)) {
+ dup = true;
+ break;
+ }
+ }
+ if (!dup) merged.push(tr);
+ }
+ }
+
+ const outFormula = new GraphTerm(merged);
+
+ // Allow blank nodes as a don't-care output (common in builtin schemas).
+ if (g.o instanceof Blank) return [{ ...subst }];
+
+ const s2 = unifyTerm(g.o, outFormula, subst);
+ return s2 !== null ? [s2] : [];
+ }
+
  // log:dtlit
  // Schema: ( $s.1? $s.2? )? log:dtlit $o?
  // true iff $o is a datatyped literal with string value $s.1 and datatype IRI $s.2
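For orientation only (not part of the package diff): a minimal N3 sketch of how the log:conjunction builtin added above could be exercised, assuming the list-of-formulae subject and merged, deduplicated formula object described in its code comments. The `:merged` result IRI is illustrative, not from the package.

```
@prefix : <http://example.org/#> .
@prefix log: <http://www.w3.org/2000/10/swap/log#> .

# Merge two quoted formulae; per the builtin above, duplicate triples are removed.
{ ( { :a :p :b . } { :c :q :d . :a :p :b . } ) log:conjunction ?F . }
=> { :merged :is ?F . } .

# Expected binding, assuming the dedup behaviour documented above:
#   :merged :is { :a :p :b . :c :q :d . } .
```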
@@ -4779,6 +4856,41 @@ function evalBuiltin(goal, subst, facts, backRules, depth, varGen) {
  return results;
  }
 
+ // log:includes (provable in scope)
+ // Schema: $s+ log:includes $o+
+ // When the subject is a formula, the scope is that concrete formula (syntactic containment).
+ // Otherwise, the scope is the current document scope (facts + backward rules).
+ if (pv === LOG_NS + 'includes') {
+ // Empty formula is always included.
+ if (g.o instanceof Literal && g.o.value === 'true') return [{ ...subst }];
+ if (!(g.o instanceof GraphTerm)) return [];
+
+ /** @type {Triple[] | null} */
+ let scopeFacts = null;
+ /** @type {Rule[]} */
+ let scopeBackRules = backRules;
+
+ // If the subject is a formula, treat it as a concrete scope graph.
+ // Also support `true` as the empty formula.
+ if (g.s instanceof GraphTerm) {
+ scopeFacts = g.s.triples.slice();
+ ensureFactIndexes(scopeFacts);
+ Object.defineProperty(scopeFacts, '__scopedSnapshot', { value: scopeFacts, enumerable: false, writable: true });
+ scopeBackRules = []; // concrete scope = syntactic containment (no extra rules)
+ } else if (g.s instanceof Literal && g.s.value === 'true') {
+ scopeFacts = [];
+ ensureFactIndexes(scopeFacts);
+ Object.defineProperty(scopeFacts, '__scopedSnapshot', { value: scopeFacts, enumerable: false, writable: true });
+ scopeBackRules = [];
+ } else {
+ scopeFacts = facts; // dynamic scope
+ }
+
+ const visited2 = [];
+ // Start from the incoming substitution so bindings flow outward.
+ return proveGoals(Array.from(g.o.triples), { ...subst }, scopeFacts, scopeBackRules, depth + 1, visited2, varGen);
+ }
+
  // log:notIncludes (not provable in scope)
  // Delay until we have a frozen scope snapshot to avoid early success.
  if (pv === LOG_NS + 'notIncludes') {
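Likewise a small illustrative N3 sketch (not from the package) for the scoped log:includes added above: with a formula subject, the builtin checks containment inside that quoted graph only, and variable bindings from the match flow outward. The `:loved` result IRI is hypothetical.

```
@prefix : <http://example.org/#> .
@prefix log: <http://www.w3.org/2000/10/swap/log#> .

# Formula subject: match inside the quoted graph only (no rules applied).
{ { :Alice :loves :Bob . :Bob a :Person . } log:includes { :Alice :loves ?who . } . }
=> { :loved :is ?who . } .

# Expected, assuming the containment semantics described above:
#   :loved :is :Bob .
```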
@@ -5794,6 +5906,41 @@ function printExplanation(df, prefixes) {
  console.log('# ----------------------------------------------------------------------\n');
  }
 
+
+ function offsetToLineCol(text, offset) {
+ const chars = Array.from(text);
+ const n = Math.max(0, Math.min(typeof offset === 'number' ? offset : 0, chars.length));
+ let line = 1;
+ let col = 1;
+ for (let i = 0; i < n; i++) {
+ const c = chars[i];
+ if (c === '\n') {
+ line++;
+ col = 1;
+ } else if (c === '\r') {
+ line++;
+ col = 1;
+ if (i + 1 < n && chars[i + 1] === '\n') i++; // swallow \n in CRLF
+ } else {
+ col++;
+ }
+ }
+ return { line, col };
+ }
+
+ function formatN3SyntaxError(err, text, path) {
+ const off = err && typeof err.offset === 'number' ? err.offset : null;
+ const label = path ? String(path) : '<input>';
+ if (off === null) {
+ return `Syntax error in ${label}: ${err && err.message ? err.message : String(err)}`;
+ }
+ const { line, col } = offsetToLineCol(text, off);
+ const lines = String(text).split(/\r\n|\n|\r/);
+ const lineText = lines[line - 1] ?? '';
+ const caret = ' '.repeat(Math.max(0, col - 1)) + '^';
+ return `Syntax error in ${label}:${line}:${col}: ${err.message}\n${lineText}\n${caret}`;
+ }
+
  // ===========================================================================
  // CLI entry point
  // ===========================================================================
@@ -5875,9 +6022,19 @@ function main() {
  process.exit(1);
  }
 
- const toks = lex(text);
- const parser = new Parser(toks);
- const [prefixes, triples, frules, brules] = parser.parseDocument();
+ let toks;
+ let prefixes, triples, frules, brules;
+ try {
+ toks = lex(text);
+ const parser = new Parser(toks);
+ [prefixes, triples, frules, brules] = parser.parseDocument();
+ } catch (e) {
+ if (e && e.name === 'N3SyntaxError') {
+ console.error(formatN3SyntaxError(e, text, path));
+ process.exit(1);
+ }
+ throw e;
+ }
  if (showAst) {
  function astReplacer(_key, value) {
  if (value instanceof Set) return Array.from(value);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "eyeling",
- "version": "1.7.0",
+ "version": "1.7.2",
  "description": "A minimal Notation3 (N3) reasoner in JavaScript.",
  "main": "./index.js",
  "keywords": [