sparql 1.0.2 → 1.0.3
Sign up to get free protection for your applications and to get access to all the features.
- data/{README.markdown → README.md} +22 -5
- data/VERSION +1 -1
- data/lib/sparql.rb +1 -2
- data/lib/sparql/algebra/operator.rb +2 -2
- data/lib/sparql/algebra/operator/prefix.rb +8 -0
- data/lib/sparql/grammar.rb +2 -4
- data/lib/sparql/grammar/meta.rb +28543 -0
- data/lib/sparql/grammar/parser11.rb +1304 -0
- data/lib/sparql/grammar/terminals11.rb +186 -0
- metadata +22 -6
- data/lib/sparql/grammar/lexer.rb +0 -613
- data/lib/sparql/grammar/parser.rb +0 -1393
- data/lib/sparql/grammar/parser/meta.rb +0 -1805
@@ -1,1393 +0,0 @@
|
|
1
|
-
module SPARQL; module Grammar
|
2
|
-
##
|
3
|
-
# A parser for the SPARQL 1.0 grammar.
|
4
|
-
#
|
5
|
-
# @see http://www.w3.org/TR/rdf-sparql-query/#grammar
|
6
|
-
# @see http://en.wikipedia.org/wiki/LR_parser
|
7
|
-
# @see http://www.w3.org/2000/10/swap/grammar/predictiveParser.py
|
8
|
-
# @see http://www.w3.org/2001/sw/DataAccess/rq23/parsers/sparql.ttl
|
9
|
-
class Parser
|
10
|
-
include SPARQL::Grammar::Meta
|
11
|
-
|
12
|
-
START = SPARQL_GRAMMAR.Query
|
13
|
-
RDF_TYPE = (a = RDF.type.dup; a.lexical = 'a'; a).freeze
|
14
|
-
|
15
|
-
##
|
16
|
-
# Initializes a new parser instance.
|
17
|
-
#
|
18
|
-
# @param [String, #to_s] input
|
19
|
-
# @param [Hash{Symbol => Object}] options
|
20
|
-
# @option options [Hash] :prefixes (Hash.new)
|
21
|
-
# the prefix mappings to use (for acessing intermediate parser productions)
|
22
|
-
# @option options [#to_s] :base_uri (nil)
|
23
|
-
# the base URI to use when resolving relative URIs (for acessing intermediate parser productions)
|
24
|
-
# @option options [#to_s] :anon_base ("b0")
|
25
|
-
# Basis for generating anonymous Nodes
|
26
|
-
# @option options [Boolean] :resolve_uris (false)
|
27
|
-
# Resolve prefix and relative IRIs, otherwise, when serializing the parsed SSE
|
28
|
-
# as S-Expressions, use the original prefixed and relative URIs along with `base` and `prefix`
|
29
|
-
# definitions.
|
30
|
-
# @option options [Boolean] :validate (false)
|
31
|
-
# whether to validate the parsed statements and values
|
32
|
-
# @option options [Boolean] :progress
|
33
|
-
# Show progress of parser productions
|
34
|
-
# @option options [Boolean] :debug
|
35
|
-
# Detailed debug output
|
36
|
-
# @return [SPARQL::Grammar::Parser]
|
37
|
-
def initialize(input = nil, options = {})
|
38
|
-
@options = {:anon_base => "b0", :validate => false}.merge(options)
|
39
|
-
self.input = input if input
|
40
|
-
@productions = []
|
41
|
-
@vars = {}
|
42
|
-
@nd_var_gen = "0"
|
43
|
-
end
|
44
|
-
|
45
|
-
##
|
46
|
-
# Any additional options for the parser.
|
47
|
-
#
|
48
|
-
# @return [Hash]
|
49
|
-
attr_reader :options
|
50
|
-
|
51
|
-
##
|
52
|
-
# The current input string being processed.
|
53
|
-
#
|
54
|
-
# @return [String]
|
55
|
-
attr_accessor :input
|
56
|
-
|
57
|
-
##
|
58
|
-
# The current input tokens being processed.
|
59
|
-
#
|
60
|
-
# @return [Array<Token>]
|
61
|
-
attr_reader :tokens
|
62
|
-
|
63
|
-
##
|
64
|
-
# The internal representation of the result using hierarch of RDF objects and SPARQL::Algebra::Operator
|
65
|
-
# objects.
|
66
|
-
# @return [Array]
|
67
|
-
# @see http://sparql.rubyforge.org/algebra
|
68
|
-
attr_accessor :result
|
69
|
-
|
70
|
-
##
|
71
|
-
# @param [IO, StringIO, Lexer, Array, String, #to_s] input
|
72
|
-
# Query may be an array of lexed tokens, a lexer, or a
|
73
|
-
# string or open file.
|
74
|
-
# @return [void]
|
75
|
-
def input=(input)
|
76
|
-
case input
|
77
|
-
when Array
|
78
|
-
@input = nil # FIXME
|
79
|
-
@tokens = input
|
80
|
-
else
|
81
|
-
lexer = input.is_a?(Lexer) ? input : Lexer.new(input, @options)
|
82
|
-
@input = lexer.input
|
83
|
-
@tokens = lexer.to_a
|
84
|
-
end
|
85
|
-
end
|
86
|
-
|
87
|
-
##
|
88
|
-
# Returns `true` if the input string is syntactically valid.
|
89
|
-
#
|
90
|
-
# @return [Boolean]
|
91
|
-
def valid?
|
92
|
-
parse
|
93
|
-
rescue Error
|
94
|
-
false
|
95
|
-
end
|
96
|
-
|
97
|
-
# @return [String]
|
98
|
-
def to_sxp_bin
|
99
|
-
@result
|
100
|
-
end
|
101
|
-
|
102
|
-
def to_s
|
103
|
-
@result.to_sxp
|
104
|
-
end
|
105
|
-
|
106
|
-
# Parse query
|
107
|
-
#
|
108
|
-
# The result is a SPARQL Algebra S-List. Productions return an array such as the following:
|
109
|
-
#
|
110
|
-
# (prefix ((: <http://example/>))
|
111
|
-
# (union
|
112
|
-
# (bgp (triple ?s ?p ?o))
|
113
|
-
# (graph ?g
|
114
|
-
# (bgp (triple ?s ?p ?o)))))
|
115
|
-
#
|
116
|
-
# @param [Symbol, #to_s] prod The starting production for the parser.
|
117
|
-
# It may be a URI from the grammar, or a symbol representing the local_name portion of the grammar URI.
|
118
|
-
# @return [Array]
|
119
|
-
# @see http://www.w3.org/2001/sw/DataAccess/rq23/rq24-algebra.html
|
120
|
-
# @see http://axel.deri.ie/sparqltutorial/ESWC2007_SPARQL_Tutorial_unit2b.pdf
|
121
|
-
def parse(prod = START)
|
122
|
-
@prod_data = [{}]
|
123
|
-
prod = prod.to_s.split("#").last.to_sym unless prod.is_a?(Symbol)
|
124
|
-
todo_stack = [{:prod => prod, :terms => nil}]
|
125
|
-
|
126
|
-
while !todo_stack.empty?
|
127
|
-
pushed = false
|
128
|
-
if todo_stack.last[:terms].nil?
|
129
|
-
todo_stack.last[:terms] = []
|
130
|
-
token = tokens.first
|
131
|
-
@lineno = token.lineno if token
|
132
|
-
debug("parse(token)") {"#{token.inspect}, prod #{todo_stack.last[:prod]}, depth #{todo_stack.length}"}
|
133
|
-
|
134
|
-
# Got an opened production
|
135
|
-
onStart(abbr(todo_stack.last[:prod]))
|
136
|
-
break if token.nil?
|
137
|
-
|
138
|
-
cur_prod = todo_stack.last[:prod]
|
139
|
-
prod_branch = BRANCHES[cur_prod.to_sym]
|
140
|
-
error("parse", "No branches found for '#{abbr(cur_prod)}'",
|
141
|
-
:production => cur_prod, :token => token) if prod_branch.nil?
|
142
|
-
sequence = prod_branch[token.representation]
|
143
|
-
debug("parse(production)") do
|
144
|
-
"cur_prod #{cur_prod}, " +
|
145
|
-
"token #{token.representation.inspect} " +
|
146
|
-
"prod_branch #{prod_branch.keys.inspect}, " +
|
147
|
-
"sequence #{sequence.inspect}"
|
148
|
-
end
|
149
|
-
if sequence.nil?
|
150
|
-
expected = prod_branch.values.uniq.map {|u| u.map {|v| abbr(v).inspect}.join(",")}
|
151
|
-
error("parse", "Found '#{token.inspect}' when parsing a #{abbr(cur_prod)}. expected #{expected.join(' | ')}",
|
152
|
-
:production => cur_prod, :token => token)
|
153
|
-
end
|
154
|
-
todo_stack.last[:terms] += sequence
|
155
|
-
end
|
156
|
-
|
157
|
-
debug("parse(terms)") {"stack #{todo_stack.last.inspect}, depth #{todo_stack.length}"}
|
158
|
-
while !todo_stack.last[:terms].to_a.empty?
|
159
|
-
term = todo_stack.last[:terms].shift
|
160
|
-
debug {"parse tokens(#{term}): #{tokens.inspect}"}
|
161
|
-
if tokens.map(&:representation).include?(term)
|
162
|
-
token = accept(term)
|
163
|
-
@lineno = token.lineno if token
|
164
|
-
debug("parse") {"term(#{token.inspect}): #{term}"}
|
165
|
-
if token
|
166
|
-
onToken(abbr(term), token.value)
|
167
|
-
else
|
168
|
-
error("parse", "Found '#{word}...'; #{term} expected",
|
169
|
-
:production => todo_stack.last[:prod], :token => tokens.first)
|
170
|
-
end
|
171
|
-
else
|
172
|
-
todo_stack << {:prod => term, :terms => nil}
|
173
|
-
debug("parse(push)") {"stack #{term}, depth #{todo_stack.length}"}
|
174
|
-
pushed = true
|
175
|
-
break
|
176
|
-
end
|
177
|
-
end
|
178
|
-
|
179
|
-
while !pushed && !todo_stack.empty? && todo_stack.last[:terms].to_a.empty?
|
180
|
-
debug("parse(pop)") {"stack #{todo_stack.last.inspect}, depth #{todo_stack.length}"}
|
181
|
-
todo_stack.pop
|
182
|
-
onFinish
|
183
|
-
end
|
184
|
-
end
|
185
|
-
while !todo_stack.empty?
|
186
|
-
debug("parse(pop)") {"stack #{todo_stack.last.inspect}, depth #{todo_stack.length}"}
|
187
|
-
todo_stack.pop
|
188
|
-
onFinish
|
189
|
-
end
|
190
|
-
|
191
|
-
# The last thing on the @prod_data stack is the result
|
192
|
-
@result = case
|
193
|
-
when !prod_data.is_a?(Hash)
|
194
|
-
prod_data
|
195
|
-
when prod_data.empty?
|
196
|
-
nil
|
197
|
-
when prod_data[:query]
|
198
|
-
prod_data[:query].to_a.length == 1 ? prod_data[:query].first : prod_data[:query]
|
199
|
-
else
|
200
|
-
key = prod_data.keys.first
|
201
|
-
[key] + prod_data[key] # Creates [:key, [:triple], ...]
|
202
|
-
end
|
203
|
-
end
|
204
|
-
|
205
|
-
##
|
206
|
-
# Returns the URI prefixes currently defined for this parser.
|
207
|
-
#
|
208
|
-
# @example
|
209
|
-
# parser.prefixes[:dc] #=> RDF::URI('http://purl.org/dc/terms/')
|
210
|
-
#
|
211
|
-
# @return [Hash{Symbol => RDF::URI}]
|
212
|
-
# @since 0.3.0
|
213
|
-
def prefixes
|
214
|
-
@options[:prefixes] ||= {}
|
215
|
-
end
|
216
|
-
|
217
|
-
##
|
218
|
-
# Defines the given URI prefixes for this parser.
|
219
|
-
#
|
220
|
-
# @example
|
221
|
-
# parser.prefixes = {
|
222
|
-
# :dc => RDF::URI('http://purl.org/dc/terms/'),
|
223
|
-
# }
|
224
|
-
#
|
225
|
-
# @param [Hash{Symbol => RDF::URI}] prefixes
|
226
|
-
# @return [Hash{Symbol => RDF::URI}]
|
227
|
-
# @since 0.3.0
|
228
|
-
def prefixes=(prefixes)
|
229
|
-
@options[:prefixes] = prefixes
|
230
|
-
end
|
231
|
-
|
232
|
-
##
|
233
|
-
# Defines the given named URI prefix for this parser.
|
234
|
-
#
|
235
|
-
# @example Defining a URI prefix
|
236
|
-
# parser.prefix :dc, RDF::URI('http://purl.org/dc/terms/')
|
237
|
-
#
|
238
|
-
# @example Returning a URI prefix
|
239
|
-
# parser.prefix(:dc) #=> RDF::URI('http://purl.org/dc/terms/')
|
240
|
-
#
|
241
|
-
# @overload prefix(name, uri)
|
242
|
-
# @param [Symbol, #to_s] name
|
243
|
-
# @param [RDF::URI, #to_s] uri
|
244
|
-
#
|
245
|
-
# @overload prefix(name)
|
246
|
-
# @param [Symbol, #to_s] name
|
247
|
-
#
|
248
|
-
# @return [RDF::URI]
|
249
|
-
def prefix(name, uri = nil)
|
250
|
-
name = name.to_s.empty? ? nil : (name.respond_to?(:to_sym) ? name.to_sym : name.to_s.to_sym)
|
251
|
-
uri.nil? ? prefixes[name] : prefixes[name] = uri
|
252
|
-
end
|
253
|
-
|
254
|
-
##
|
255
|
-
# Returns the Base URI defined for the parser,
|
256
|
-
# as specified or when parsing a BASE prologue element.
|
257
|
-
#
|
258
|
-
# @example
|
259
|
-
# parser.base #=> RDF::URI('http://example.com/')
|
260
|
-
#
|
261
|
-
# @return [HRDF::URI]
|
262
|
-
def base_uri
|
263
|
-
RDF::URI(@options[:base_uri])
|
264
|
-
end
|
265
|
-
|
266
|
-
##
|
267
|
-
# Set the Base URI to use for this parser.
|
268
|
-
#
|
269
|
-
# @param [RDF::URI, #to_s] uri
|
270
|
-
#
|
271
|
-
# @example
|
272
|
-
# parser.base_uri = RDF::URI('http://purl.org/dc/terms/')
|
273
|
-
#
|
274
|
-
# @return [RDF::URI]
|
275
|
-
def base_uri=(uri)
|
276
|
-
@options[:base_uri] = RDF::URI(uri)
|
277
|
-
end
|
278
|
-
|
279
|
-
##
|
280
|
-
# Returns `true` if parsed statements and values should be validated.
|
281
|
-
#
|
282
|
-
# @return [Boolean] `true` or `false`
|
283
|
-
# @since 0.3.0
|
284
|
-
def validate?
|
285
|
-
@options[:validate]
|
286
|
-
end
|
287
|
-
|
288
|
-
private
|
289
|
-
|
290
|
-
# Handlers used to define actions for each productions.
|
291
|
-
# If a context is defined, create a producation data element and add to the @prod_data stack
|
292
|
-
# If entries are defined, pass production data to :start and/or :finish handlers
|
293
|
-
def contexts(production)
|
294
|
-
case production
|
295
|
-
when :Query
|
296
|
-
# [1] Query ::= Prologue ( SelectQuery | ConstructQuery | DescribeQuery | AskQuery )
|
297
|
-
{
|
298
|
-
:finish => lambda { |data| finalize_query(data) }
|
299
|
-
}
|
300
|
-
when :Prologue
|
301
|
-
# [2] Prologue ::= BaseDecl? PrefixDecl*
|
302
|
-
{
|
303
|
-
:finish => lambda { |data|
|
304
|
-
unless options[:resolve_uris]
|
305
|
-
# Only output if we're not resolving URIs internally
|
306
|
-
add_prod_datum(:BaseDecl, data[:BaseDecl])
|
307
|
-
add_prod_data(:PrefixDecl, data[:PrefixDecl]) if data[:PrefixDecl]
|
308
|
-
end
|
309
|
-
}
|
310
|
-
}
|
311
|
-
when :BaseDecl
|
312
|
-
# [3] BaseDecl ::= 'BASE' IRI_REF
|
313
|
-
{
|
314
|
-
:finish => lambda { |data|
|
315
|
-
self.base_uri = uri(data[:iri].last)
|
316
|
-
add_prod_datum(:BaseDecl, data[:iri].last) unless options[:resolve_uris]
|
317
|
-
}
|
318
|
-
}
|
319
|
-
when :PrefixDecl
|
320
|
-
# [4] PrefixDecl := 'PREFIX' PNAME_NS IRI_REF";
|
321
|
-
{
|
322
|
-
:finish => lambda { |data|
|
323
|
-
if data[:iri]
|
324
|
-
self.prefix(data[:prefix], data[:iri].last)
|
325
|
-
add_prod_data(:PrefixDecl, data[:iri].unshift("#{data[:prefix]}:".to_sym))
|
326
|
-
end
|
327
|
-
}
|
328
|
-
}
|
329
|
-
when :SelectQuery
|
330
|
-
# [5] SelectQuery ::= 'SELECT' ( 'DISTINCT' | 'REDUCED' )? ( Var+ | '*' ) DatasetClause* WhereClause SolutionModifier
|
331
|
-
{
|
332
|
-
:finish => lambda { |data|
|
333
|
-
query = merge_modifiers(data)
|
334
|
-
add_prod_datum(:query, query)
|
335
|
-
}
|
336
|
-
}
|
337
|
-
when :ConstructQuery
|
338
|
-
# [6] ConstructQuery ::= 'CONSTRUCT' ConstructTemplate DatasetClause* WhereClause SolutionModifier
|
339
|
-
{
|
340
|
-
:finish => lambda { |data|
|
341
|
-
query = merge_modifiers(data)
|
342
|
-
template = data[:ConstructTemplate] || []
|
343
|
-
|
344
|
-
add_prod_datum(:query, Algebra::Expression[:construct, template, query])
|
345
|
-
}
|
346
|
-
}
|
347
|
-
when :DescribeQuery
|
348
|
-
# [7] DescribeQuery ::= 'DESCRIBE' ( VarOrIRIref+ | '*' ) DatasetClause* WhereClause? SolutionModifier
|
349
|
-
{
|
350
|
-
:finish => lambda { |data|
|
351
|
-
query = merge_modifiers(data)
|
352
|
-
to_describe = data[:VarOrIRIref] || []
|
353
|
-
query = Algebra::Expression[:describe, to_describe, query]
|
354
|
-
add_prod_datum(:query, query)
|
355
|
-
}
|
356
|
-
}
|
357
|
-
when :AskQuery
|
358
|
-
# [8] AskQuery ::= 'ASK' DatasetClause* WhereClause
|
359
|
-
{
|
360
|
-
:finish => lambda { |data|
|
361
|
-
query = merge_modifiers(data)
|
362
|
-
add_prod_datum(:query, Algebra::Expression[:ask, query])
|
363
|
-
}
|
364
|
-
}
|
365
|
-
when :DefaultGraphClause
|
366
|
-
# [10] DefaultGraphClause ::= SourceSelector
|
367
|
-
{
|
368
|
-
:finish => lambda { |data|
|
369
|
-
add_prod_datum(:dataset, data[:IRIref])
|
370
|
-
}
|
371
|
-
}
|
372
|
-
when :NamedGraphClause
|
373
|
-
# [11] NamedGraphClause ::= 'NAMED' SourceSelector
|
374
|
-
{
|
375
|
-
:finish => lambda { |data|
|
376
|
-
add_prod_data(:dataset, data[:IRIref].unshift(:named))
|
377
|
-
}
|
378
|
-
}
|
379
|
-
when :SolutionModifier
|
380
|
-
# [14] SolutionModifier ::= OrderClause? LimitOffsetClauses?
|
381
|
-
{
|
382
|
-
:finish => lambda { |data|
|
383
|
-
add_prod_datum(:order, data[:order])
|
384
|
-
add_prod_datum(:slice, data[:slice])
|
385
|
-
}
|
386
|
-
}
|
387
|
-
when :LimitOffsetClauses
|
388
|
-
# [15] LimitOffsetClauses ::= ( LimitClause OffsetClause? | OffsetClause LimitClause? )
|
389
|
-
{
|
390
|
-
:finish => lambda { |data|
|
391
|
-
return unless data[:limit] || data[:offset]
|
392
|
-
limit = data[:limit] ? data[:limit].last : :_
|
393
|
-
offset = data[:offset] ? data[:offset].last : :_
|
394
|
-
add_prod_data(:slice, offset, limit)
|
395
|
-
}
|
396
|
-
}
|
397
|
-
when :OrderClause
|
398
|
-
# [16] OrderClause ::= 'ORDER' 'BY' OrderCondition+
|
399
|
-
{
|
400
|
-
:finish => lambda { |data|
|
401
|
-
# Output 2puls of order conditions from left to right
|
402
|
-
res = data[:OrderCondition]
|
403
|
-
if res = data[:OrderCondition]
|
404
|
-
res = [res] if [:asc, :desc].include?(res[0]) # Special case when there's only one condition and it's ASC (x) or DESC (x)
|
405
|
-
add_prod_data(:order, res)
|
406
|
-
end
|
407
|
-
}
|
408
|
-
}
|
409
|
-
when :OrderCondition
|
410
|
-
# [17] OrderCondition ::= ( ( 'ASC' | 'DESC' ) BrackettedExpression ) | ( Constraint | Var )
|
411
|
-
{
|
412
|
-
:finish => lambda { |data|
|
413
|
-
if data[:OrderDirection]
|
414
|
-
add_prod_datum(:OrderCondition, Algebra::Expression.for(data[:OrderDirection] + data[:Expression]))
|
415
|
-
else
|
416
|
-
add_prod_datum(:OrderCondition, data[:Constraint] || data[:Var])
|
417
|
-
end
|
418
|
-
}
|
419
|
-
}
|
420
|
-
when :LimitClause
|
421
|
-
# [18] LimitClause ::= 'LIMIT' INTEGER
|
422
|
-
{
|
423
|
-
:finish => lambda { |data| add_prod_datum(:limit, data[:literal]) }
|
424
|
-
}
|
425
|
-
when :OffsetClause
|
426
|
-
# [19] OffsetClause ::= 'OFFSET' INTEGER
|
427
|
-
{
|
428
|
-
:finish => lambda { |data| add_prod_datum(:offset, data[:literal]) }
|
429
|
-
}
|
430
|
-
when :GroupGraphPattern
|
431
|
-
# [20] GroupGraphPattern ::= '{' TriplesBlock? ( ( GraphPatternNotTriples | Filter ) '.'? TriplesBlock? )* '}'
|
432
|
-
{
|
433
|
-
:finish => lambda { |data|
|
434
|
-
query_list = data[:query_list]
|
435
|
-
debug("GroupGraphPattern") {"ql #{query_list.to_a.inspect}"}
|
436
|
-
debug("GroupGraphPattern") {"q #{data[:query] ? data[:query].first.inspect : 'nil'}"}
|
437
|
-
|
438
|
-
if query_list
|
439
|
-
lhs = data[:query].to_a.first
|
440
|
-
while !query_list.empty?
|
441
|
-
rhs = query_list.shift
|
442
|
-
# Make the right-hand-side a Join with only a single operand, if it's not already and Operator
|
443
|
-
rhs = Algebra::Expression.for(:join, :placeholder, rhs) unless rhs.is_a?(Algebra::Operator)
|
444
|
-
debug("GroupGraphPattern(itr)") {"<= q: #{rhs.inspect}"}
|
445
|
-
debug("GroupGraphPattern(itr)") {"<= lhs: #{lhs ? lhs.inspect : 'nil'}"}
|
446
|
-
lhs ||= Algebra::Operator::BGP.new if rhs.is_a?(Algebra::Operator::LeftJoin)
|
447
|
-
if lhs
|
448
|
-
if rhs.operand(0) == :placeholder
|
449
|
-
rhs.operands[0] = lhs
|
450
|
-
else
|
451
|
-
rhs = Algebra::Operator::Join.new(lhs, rhs)
|
452
|
-
end
|
453
|
-
end
|
454
|
-
lhs = rhs
|
455
|
-
lhs = lhs.operand(1) if lhs.operand(0) == :placeholder
|
456
|
-
debug("GroupGraphPattern(itr)") {"=> lhs: #{lhs.inspect}"}
|
457
|
-
end
|
458
|
-
# Trivial simplification for :join or :union of one query
|
459
|
-
case lhs
|
460
|
-
when Algebra::Operator::Join, Algebra::Operator::Union
|
461
|
-
if lhs.operand(0) == :placeholder
|
462
|
-
lhs = lhs.operand(1)
|
463
|
-
debug("GroupGraphPattern(simplify)") {"=> lhs: #{lhs.inspect}"}
|
464
|
-
end
|
465
|
-
end
|
466
|
-
res = lhs
|
467
|
-
elsif data[:query]
|
468
|
-
res = data[:query].first
|
469
|
-
end
|
470
|
-
|
471
|
-
debug("GroupGraphPattern(pre-filter)") {"res: #{res.inspect}"}
|
472
|
-
|
473
|
-
if data[:filter]
|
474
|
-
expr, query = flatten_filter(data[:filter])
|
475
|
-
query = res || Algebra::Operator::BGP.new
|
476
|
-
# query should be nil
|
477
|
-
res = Algebra::Operator::Filter.new(expr, query)
|
478
|
-
end
|
479
|
-
add_prod_datum(:query, res)
|
480
|
-
}
|
481
|
-
}
|
482
|
-
when :_GraphPatternNotTriples_or_Filter_Dot_Opt_TriplesBlock_Opt
|
483
|
-
# Create a stack of GroupQuerys having a single graph element and resolve in GroupGraphPattern
|
484
|
-
{
|
485
|
-
:finish => lambda { |data|
|
486
|
-
lhs = data[:_GraphPatternNotTriples_or_Filter]
|
487
|
-
rhs = data[:query]
|
488
|
-
add_prod_datum(:query_list, lhs) if lhs
|
489
|
-
rhs = Algebra::Expression.for(:join, :placeholder, rhs.first) if rhs
|
490
|
-
add_prod_data(:query_list, rhs) if rhs
|
491
|
-
add_prod_datum(:filter, data[:filter])
|
492
|
-
}
|
493
|
-
}
|
494
|
-
when :_GraphPatternNotTriples_or_Filter
|
495
|
-
# Create a stack of Single operand Operators and resolve in GroupGraphPattern
|
496
|
-
{
|
497
|
-
:finish => lambda { |data|
|
498
|
-
add_prod_datum(:filter, data[:filter])
|
499
|
-
|
500
|
-
if data[:query]
|
501
|
-
res = data[:query].to_a.first
|
502
|
-
res = Algebra::Expression.for(:join, :placeholder, res) unless res.is_a?(Algebra::Operator)
|
503
|
-
add_prod_data(:_GraphPatternNotTriples_or_Filter, res)
|
504
|
-
end
|
505
|
-
}
|
506
|
-
}
|
507
|
-
when :TriplesBlock
|
508
|
-
# [21] TriplesBlock ::= TriplesSameSubject ( '.' TriplesBlock? )?
|
509
|
-
{
|
510
|
-
:finish => lambda { |data|
|
511
|
-
query = Algebra::Operator::BGP.new
|
512
|
-
data[:pattern].each {|p| query << p}
|
513
|
-
|
514
|
-
# Append triples from ('.' TriplesBlock? )?
|
515
|
-
data[:query].to_a.each {|q| query += q}
|
516
|
-
add_prod_datum(:query, query)
|
517
|
-
}
|
518
|
-
}
|
519
|
-
when :OptionalGraphPattern
|
520
|
-
# [23] OptionalGraphPattern ::= 'OPTIONAL' GroupGraphPattern
|
521
|
-
{
|
522
|
-
:finish => lambda { |data|
|
523
|
-
if data[:query]
|
524
|
-
expr = nil
|
525
|
-
query = data[:query].first
|
526
|
-
if query.is_a?(Algebra::Operator::Filter)
|
527
|
-
# Change to expression on left-join with query element
|
528
|
-
expr, query = query.operands
|
529
|
-
add_prod_data(:query, Algebra::Expression.for(:leftjoin, :placeholder, query, expr))
|
530
|
-
else
|
531
|
-
add_prod_data(:query, Algebra::Expression.for(:leftjoin, :placeholder, query))
|
532
|
-
end
|
533
|
-
end
|
534
|
-
}
|
535
|
-
}
|
536
|
-
when :GraphGraphPattern
|
537
|
-
# [24] GraphGraphPattern ::= 'GRAPH' VarOrIRIref GroupGraphPattern
|
538
|
-
{
|
539
|
-
:finish => lambda { |data|
|
540
|
-
if data[:query]
|
541
|
-
context = (data[:VarOrIRIref]).last
|
542
|
-
bgp = data[:query].first
|
543
|
-
if context
|
544
|
-
add_prod_data(:query, Algebra::Expression.for(:graph, context, bgp))
|
545
|
-
else
|
546
|
-
add_prod_data(:query, bgp)
|
547
|
-
end
|
548
|
-
end
|
549
|
-
}
|
550
|
-
}
|
551
|
-
when :GroupOrUnionGraphPattern
|
552
|
-
# [25] GroupOrUnionGraphPattern ::= GroupGraphPattern ( 'UNION' GroupGraphPattern )*
|
553
|
-
{
|
554
|
-
:finish => lambda { |data|
|
555
|
-
# Iterate through expression to create binary operations
|
556
|
-
res = data[:query].to_a.first
|
557
|
-
if data[:union]
|
558
|
-
while !data[:union].empty?
|
559
|
-
# Join union patterns together as Union operators
|
560
|
-
#puts "res: res: #{res}, input_prod: #{input_prod}, data[:union]: #{data[:union].first}"
|
561
|
-
lhs = res
|
562
|
-
rhs = data[:union].shift
|
563
|
-
res = Algebra::Expression.for(:union, lhs, rhs)
|
564
|
-
end
|
565
|
-
end
|
566
|
-
add_prod_datum(:query, res)
|
567
|
-
}
|
568
|
-
}
|
569
|
-
when :_UNION_GroupGraphPattern_Star
|
570
|
-
{
|
571
|
-
:finish => lambda { |data|
|
572
|
-
# Add [:union rhs] to stack based on ":union"
|
573
|
-
add_prod_data(:union, data[:query].to_a.first)
|
574
|
-
add_prod_data(:union, data[:union].first) if data[:union]
|
575
|
-
}
|
576
|
-
}
|
577
|
-
when :Filter
|
578
|
-
# [26] Filter ::= 'FILTER' Constraint
|
579
|
-
{
|
580
|
-
:finish => lambda { |data| add_prod_datum(:filter, data[:Constraint]) }
|
581
|
-
}
|
582
|
-
when :Constraint
|
583
|
-
# [27] Constraint ::= BrackettedExpression | BuiltInCall | FunctionCall
|
584
|
-
{
|
585
|
-
:finish => lambda { |data|
|
586
|
-
if data[:Expression]
|
587
|
-
# Resolve expression to the point it is either an atom or an s-exp
|
588
|
-
res = data[:Expression].to_a.first
|
589
|
-
add_prod_data(:Constraint, data[:Expression].to_a.first)
|
590
|
-
elsif data[:BuiltInCall]
|
591
|
-
add_prod_datum(:Constraint, data[:BuiltInCall])
|
592
|
-
elsif data[:Function]
|
593
|
-
add_prod_datum(:Constraint, data[:Function])
|
594
|
-
end
|
595
|
-
}
|
596
|
-
}
|
597
|
-
when :FunctionCall
|
598
|
-
# [28] FunctionCall ::= IRIref ArgList
|
599
|
-
{
|
600
|
-
:finish => lambda { |data| add_prod_data(:Function, data[:IRIref] + data[:ArgList]) }
|
601
|
-
}
|
602
|
-
when :ArgList
|
603
|
-
# [29] ArgList ::= ( NIL | '(' Expression ( ',' Expression )* ')' )
|
604
|
-
{
|
605
|
-
:finish => lambda { |data| data.values.each {|v| add_prod_datum(:ArgList, v)} }
|
606
|
-
}
|
607
|
-
when :ConstructTemplate
|
608
|
-
# [30] ConstructTemplate ::= '{' ConstructTriples? '}'
|
609
|
-
{
|
610
|
-
:start => lambda { |data| @nd_var_gen = false}, # Generate BNodes instead of non-distinguished variables
|
611
|
-
:finish => lambda { |data|
|
612
|
-
@nd_var_gen = "0"
|
613
|
-
add_prod_datum(:ConstructTemplate, data[:pattern])
|
614
|
-
add_prod_datum(:ConstructTemplate, data[:ConstructTemplate])
|
615
|
-
}
|
616
|
-
}
|
617
|
-
when :TriplesSameSubject
|
618
|
-
# [32] TriplesSameSubject ::= VarOrTerm PropertyListNotEmpty | TriplesNode PropertyList
|
619
|
-
{
|
620
|
-
:finish => lambda { |data| add_prod_datum(:pattern, data[:pattern]) }
|
621
|
-
}
|
622
|
-
when :PropertyListNotEmpty
|
623
|
-
# [33] PropertyListNotEmpty ::= Verb ObjectList ( ';' ( Verb ObjectList )? )*
|
624
|
-
{
|
625
|
-
:start => lambda {|data|
|
626
|
-
subject = prod_data[:VarOrTerm] || prod_data[:TriplesNode] || prod_data[:GraphNode]
|
627
|
-
error(nil, "Expected VarOrTerm or TriplesNode or GraphNode", :production => :PropertyListNotEmpty) if validate? && !subject
|
628
|
-
data[:Subject] = subject
|
629
|
-
},
|
630
|
-
:finish => lambda {|data| add_prod_datum(:pattern, data[:pattern])}
|
631
|
-
}
|
632
|
-
when :ObjectList
|
633
|
-
# [35] ObjectList ::= Object ( ',' Object )*
|
634
|
-
{
|
635
|
-
:start => lambda { |data|
|
636
|
-
# Called after Verb. The prod_data stack should have Subject and Verb elements
|
637
|
-
data[:Subject] = prod_data[:Subject]
|
638
|
-
error(nil, "Expected Subject", :production => :ObjectList) if validate?
|
639
|
-
error(nil, "Expected Verb", :production => :ObjectList) if validate?
|
640
|
-
data[:Subject] = prod_data[:Subject]
|
641
|
-
data[:Verb] = prod_data[:Verb].to_a.last
|
642
|
-
},
|
643
|
-
:finish => lambda { |data| add_prod_datum(:pattern, data[:pattern]) }
|
644
|
-
}
|
645
|
-
when :Object
|
646
|
-
# [36] Object ::= GraphNode
|
647
|
-
{
|
648
|
-
:finish => lambda { |data|
|
649
|
-
object = data[:VarOrTerm] || data[:TriplesNode] || data[:GraphNode]
|
650
|
-
if object
|
651
|
-
add_pattern(:Object, :subject => prod_data[:Subject], :predicate => prod_data[:Verb], :object => object)
|
652
|
-
add_prod_datum(:pattern, data[:pattern])
|
653
|
-
end
|
654
|
-
}
|
655
|
-
}
|
656
|
-
when :Verb
|
657
|
-
# [37] Verb ::= VarOrIRIref | 'a'
|
658
|
-
{
|
659
|
-
:finish => lambda { |data| data.values.each {|v| add_prod_datum(:Verb, v)} }
|
660
|
-
}
|
661
|
-
when :TriplesNode
|
662
|
-
# [38] TriplesNode ::= Collection | BlankNodePropertyList
|
663
|
-
#
|
664
|
-
# Allocate Blank Node for () or []
|
665
|
-
{
|
666
|
-
:start => lambda { |data| data[:TriplesNode] = gen_node() },
|
667
|
-
:finish => lambda { |data|
|
668
|
-
add_prod_datum(:pattern, data[:pattern])
|
669
|
-
add_prod_datum(:TriplesNode, data[:TriplesNode])
|
670
|
-
}
|
671
|
-
}
|
672
|
-
when :Collection
|
673
|
-
# [40] Collection ::= '(' GraphNode+ ')'
|
674
|
-
{
|
675
|
-
:start => lambda { |data| data[:Collection] = prod_data[:TriplesNode]},
|
676
|
-
:finish => lambda { |data| expand_collection(data) }
|
677
|
-
}
|
678
|
-
when :GraphNode
|
679
|
-
# [41] GraphNode ::= VarOrTerm | TriplesNode
|
680
|
-
{
|
681
|
-
:finish => lambda { |data|
|
682
|
-
term = data[:VarOrTerm] || data[:TriplesNode]
|
683
|
-
add_prod_datum(:pattern, data[:pattern])
|
684
|
-
add_prod_datum(:GraphNode, term)
|
685
|
-
}
|
686
|
-
}
|
687
|
-
when :VarOrTerm
|
688
|
-
# [42] VarOrTerm ::= Var | GraphTerm
|
689
|
-
{
|
690
|
-
:finish => lambda { |data| data.values.each {|v| add_prod_datum(:VarOrTerm, v)} }
|
691
|
-
}
|
692
|
-
when :VarOrIRIref
|
693
|
-
# [43] VarOrIRIref ::= Var | IRIref
|
694
|
-
{
|
695
|
-
:finish => lambda { |data| data.values.each {|v| add_prod_datum(:VarOrIRIref, v)} }
|
696
|
-
}
|
697
|
-
when :GraphTerm
|
698
|
-
# [45] GraphTerm ::= IRIref | RDFLiteral | NumericLiteral | BooleanLiteral | BlankNode | NIL
|
699
|
-
{
|
700
|
-
:finish => lambda { |data|
|
701
|
-
add_prod_datum(:GraphTerm, data[:IRIref] || data[:literal] || data[:BlankNode] || data[:NIL])
|
702
|
-
}
|
703
|
-
}
|
704
|
-
when :Expression
|
705
|
-
# [46] Expression ::= ConditionalOrExpression
|
706
|
-
{
|
707
|
-
:finish => lambda { |data| add_prod_datum(:Expression, data[:Expression]) }
|
708
|
-
}
|
709
|
-
when :ConditionalOrExpression
|
710
|
-
# [47] ConditionalOrExpression ::= ConditionalAndExpression ( '||' ConditionalAndExpression )*
|
711
|
-
{
|
712
|
-
:finish => lambda { |data| add_operator_expressions(:_OR, data) }
|
713
|
-
}
|
714
|
-
when :_OR_ConditionalAndExpression
|
715
|
-
# This part handles the operator and the rhs of a ConditionalAndExpression
|
716
|
-
{
|
717
|
-
:finish => lambda { |data| accumulate_operator_expressions(:ConditionalOrExpression, :_OR, data) }
|
718
|
-
}
|
719
|
-
when :ConditionalAndExpression
|
720
|
-
# [48] ConditionalAndExpression ::= ValueLogical ( '&&' ValueLogical )*
|
721
|
-
{
|
722
|
-
:finish => lambda { |data| add_operator_expressions(:_AND, data) }
|
723
|
-
}
|
724
|
-
when :_AND_ValueLogical_Star
|
725
|
-
# This part handles the operator and the rhs of a ConditionalAndExpression
|
726
|
-
{
|
727
|
-
:finish => lambda { |data| accumulate_operator_expressions(:ConditionalAndExpression, :_AND, data) }
|
728
|
-
}
|
729
|
-
when :RelationalExpression
|
730
|
-
# [50] RelationalExpression ::= NumericExpression (
|
731
|
-
# '=' NumericExpression
|
732
|
-
# | '!=' NumericExpression
|
733
|
-
# | '<' NumericExpression
|
734
|
-
# | '>' NumericExpression
|
735
|
-
# | '<=' NumericExpression
|
736
|
-
# | '>=' NumericExpression )?
|
737
|
-
#
|
738
|
-
{
|
739
|
-
:finish => lambda { |data|
|
740
|
-
if data[:_Compare_Numeric]
|
741
|
-
add_prod_datum(:Expression, Algebra::Expression.for(data[:_Compare_Numeric].insert(1, *data[:Expression])))
|
742
|
-
else
|
743
|
-
# NumericExpression with no comparitor
|
744
|
-
add_prod_datum(:Expression, data[:Expression])
|
745
|
-
end
|
746
|
-
}
|
747
|
-
}
|
748
|
-
when :_Compare_NumericExpression_Opt # ( '=' NumericExpression | '!=' NumericExpression | ... )?
|
749
|
-
# This part handles the operator and the rhs of a RelationalExpression
|
750
|
-
{
|
751
|
-
:finish => lambda { |data|
|
752
|
-
if data[:RelationalExpression]
|
753
|
-
add_prod_datum(:_Compare_Numeric, data[:RelationalExpression] + data[:Expression])
|
754
|
-
end
|
755
|
-
}
|
756
|
-
}
|
757
|
-
when :AdditiveExpression
|
758
|
-
# [52] AdditiveExpression ::= MultiplicativeExpression ( '+' MultiplicativeExpression | '-' MultiplicativeExpression )*
|
759
|
-
{
|
760
|
-
:finish => lambda { |data| add_operator_expressions(:_Add_Sub, data) }
|
761
|
-
}
|
762
|
-
when :_Add_Sub_MultiplicativeExpression_Star # ( '+' MultiplicativeExpression | '-' MultiplicativeExpression | ... )*
|
763
|
-
# This part handles the operator and the rhs of a AdditiveExpression
|
764
|
-
{
|
765
|
-
:finish => lambda { |data| accumulate_operator_expressions(:AdditiveExpression, :_Add_Sub, data) }
|
766
|
-
}
|
767
|
-
when :MultiplicativeExpression
|
768
|
-
# [53] MultiplicativeExpression ::= UnaryExpression ( '*' UnaryExpression | '/' UnaryExpression )*
|
769
|
-
{
|
770
|
-
:finish => lambda { |data| add_operator_expressions(:_Mul_Div, data) }
|
771
|
-
}
|
772
|
-
when :_Mul_Div_UnaryExpression_Star # ( '*' UnaryExpression | '/' UnaryExpression )*
|
773
|
-
# This part handles the operator and the rhs of a MultiplicativeExpression
|
774
|
-
{
|
775
|
-
# Mul or Div with prod_data[:Expression]
|
776
|
-
:finish => lambda { |data| accumulate_operator_expressions(:MultiplicativeExpression, :_Mul_Div, data) }
|
777
|
-
}
|
778
|
-
when :UnaryExpression
|
779
|
-
# [54] UnaryExpression ::= '!' PrimaryExpression | '+' PrimaryExpression | '-' PrimaryExpression | PrimaryExpression
|
780
|
-
{
|
781
|
-
:finish => lambda { |data|
|
782
|
-
case data[:UnaryExpression]
|
783
|
-
when [:"!"]
|
784
|
-
add_prod_datum(:Expression, Algebra::Expression[:not, data[:Expression].first])
|
785
|
-
when [:"-"]
|
786
|
-
e = data[:Expression].first
|
787
|
-
if e.is_a?(RDF::Literal::Numeric)
|
788
|
-
add_prod_datum(:Expression, -e) # Simple optimization to match ARQ generation
|
789
|
-
else
|
790
|
-
add_prod_datum(:Expression, Algebra::Expression[:minus, e])
|
791
|
-
end
|
792
|
-
else
|
793
|
-
add_prod_datum(:Expression, data[:Expression])
|
794
|
-
end
|
795
|
-
}
|
796
|
-
}
|
797
|
-
when :PrimaryExpression
|
798
|
-
# [55] PrimaryExpression ::= BrackettedExpression | BuiltInCall | IRIrefOrFunction | RDFLiteral | NumericLiteral | BooleanLiteral | Var
|
799
|
-
{
|
800
|
-
:finish => lambda { |data|
|
801
|
-
if data[:Expression]
|
802
|
-
add_prod_datum(:Expression, data[:Expression])
|
803
|
-
elsif data[:BuiltInCall]
|
804
|
-
add_prod_datum(:Expression, data[:BuiltInCall])
|
805
|
-
elsif data[:IRIref]
|
806
|
-
add_prod_datum(:Expression, data[:IRIref])
|
807
|
-
elsif data[:Function]
|
808
|
-
add_prod_datum(:Expression, data[:Function]) # Maintain array representation
|
809
|
-
elsif data[:literal]
|
810
|
-
add_prod_datum(:Expression, data[:literal])
|
811
|
-
elsif data[:Var]
|
812
|
-
add_prod_datum(:Expression, data[:Var])
|
813
|
-
end
|
814
|
-
|
815
|
-
add_prod_datum(:UnaryExpression, data[:UnaryExpression]) # Keep track of this for parent UnaryExpression production
|
816
|
-
}
|
817
|
-
}
|
818
|
-
when :BuiltInCall
|
819
|
-
# [57] BuiltInCall ::= 'STR' '(' Expression ')'
|
820
|
-
# | 'LANG' '(' Expression ')'
|
821
|
-
# | 'LANGMATCHES' '(' Expression ',' Expression ')'
|
822
|
-
# | 'DATATYPE' '(' Expression ')'
|
823
|
-
# | 'BOUND' '(' Var ')'
|
824
|
-
# | 'sameTerm' '(' Expression ',' Expression ')'
|
825
|
-
# | 'isIRI' '(' Expression ')'
|
826
|
-
# | 'isURI' '(' Expression ')'
|
827
|
-
# | 'isBLANK' '(' Expression ')'
|
828
|
-
# | 'isLITERAL' '(' Expression ')'
|
829
|
-
# | RegexExpression
|
830
|
-
{
|
831
|
-
:finish => lambda { |data|
|
832
|
-
if data[:regex]
|
833
|
-
add_prod_datum(:BuiltInCall, Algebra::Expression.for(data[:regex].unshift(:regex)))
|
834
|
-
elsif data[:BOUND]
|
835
|
-
add_prod_datum(:BuiltInCall, Algebra::Expression.for(data[:Var].unshift(:bound)))
|
836
|
-
elsif data[:BuiltInCall]
|
837
|
-
add_prod_datum(:BuiltInCall, Algebra::Expression.for(data[:BuiltInCall] + data[:Expression]))
|
838
|
-
end
|
839
|
-
}
|
840
|
-
}
|
841
|
-
when :RegexExpression
|
842
|
-
# [58] RegexExpression ::= 'REGEX' '(' Expression ',' Expression ( ',' Expression )? ')'
|
843
|
-
{
|
844
|
-
:finish => lambda { |data| add_prod_datum(:regex, data[:Expression]) }
|
845
|
-
}
|
846
|
-
when :IRIrefOrFunction
|
847
|
-
# [59] IRIrefOrFunction ::= IRIref ArgList?
|
848
|
-
{
|
849
|
-
:finish => lambda { |data|
|
850
|
-
if data.has_key?(:ArgList)
|
851
|
-
# Function is (func arg1 arg2 ...)
|
852
|
-
add_prod_data(:Function, data[:IRIref] + data[:ArgList])
|
853
|
-
else
|
854
|
-
add_prod_datum(:IRIref, data[:IRIref])
|
855
|
-
end
|
856
|
-
}
|
857
|
-
}
|
858
|
-
when :RDFLiteral
|
859
|
-
# [60] RDFLiteral ::= String ( LANGTAG | ( '^^' IRIref ) )?
|
860
|
-
{
|
861
|
-
:finish => lambda { |data|
|
862
|
-
if data[:string]
|
863
|
-
lit = data.dup
|
864
|
-
str = lit.delete(:string).last
|
865
|
-
lit[:datatype] = lit.delete(:IRIref).last if lit[:IRIref]
|
866
|
-
lit[:language] = lit.delete(:language).last.downcase if lit[:language]
|
867
|
-
add_prod_datum(:literal, RDF::Literal.new(str, lit)) if str
|
868
|
-
end
|
869
|
-
}
|
870
|
-
}
|
871
|
-
when :NumericLiteralPositive
|
872
|
-
# [63] NumericLiteralPositive ::= INTEGER_POSITIVE | DECIMAL_POSITIVE | DOUBLE_POSITIVE
|
873
|
-
{
|
874
|
-
:finish => lambda { |data|
|
875
|
-
num = data.values.flatten.last
|
876
|
-
add_prod_datum(:literal, num.class.new("+#{num.value}"))
|
877
|
-
add_prod_datum(:UnaryExpression, data[:UnaryExpression]) # Keep track of this for parent UnaryExpression production
|
878
|
-
}
|
879
|
-
}
|
880
|
-
when :NumericLiteralNegative
|
881
|
-
# [64] NumericLiteralNegative ::= INTEGER_NEGATIVE | DECIMAL_NEGATIVE | DOUBLE_NEGATIVE
|
882
|
-
{
|
883
|
-
:finish => lambda { |data|
|
884
|
-
num = data.values.flatten.last
|
885
|
-
add_prod_datum(:literal, num.class.new("-#{num.value}"))
|
886
|
-
add_prod_datum(:UnaryExpression, data[:UnaryExpression]) # Keep track of this for parent UnaryExpression production
|
887
|
-
}
|
888
|
-
}
|
889
|
-
when :IRIref
|
890
|
-
# [67] IRIref ::= IRI_REF | PrefixedName
|
891
|
-
{
|
892
|
-
:finish => lambda { |data| add_prod_datum(:IRIref, data[:iri]) }
|
893
|
-
}
|
894
|
-
when :PrefixedName
|
895
|
-
# [68] PrefixedName ::= PNAME_LN | PNAME_NS
|
896
|
-
{
|
897
|
-
:finish => lambda { |data| add_prod_datum(:iri, data[:PrefixedName]) }
|
898
|
-
}
|
899
|
-
end
|
900
|
-
end
|
901
|
-
|
902
|
-
# Handler invoked when the parser enters a production.
#
# Pushes the production onto @productions; when a handler context is
# registered for it, also pushes a fresh data element onto @prod_data
# (after letting the context's optional :start hook customize it).
def onStart(production)
  handler = contexts(production.to_sym)
  @productions << production
  unless handler
    progress("#{production}(:start)", '')
    return
  end
  progress("#{production}(:start):#{@prod_data.length}", prod_data)
  data = {}
  handler[:start].call(data) if handler.has_key?(:start)
  @prod_data << data
end
|
917
|
-
|
918
|
-
# Handler invoked when the parser completes a production.
#
# Pops the production from @productions; when a handler context is
# registered, pops its data element from @prod_data and hands it to the
# context's optional :finish hook.
def onFinish
  production = @productions.pop
  handler = contexts(production.to_sym)
  if handler
    data = @prod_data.pop
    handler[:finish].call(data) if handler.has_key?(:finish)
    progress("#{production}(:finish):#{@prod_data.length}", prod_data, :depth => @productions.length + 1)
  else
    progress("#{production}(:finish)", '', :depth => @productions.length + 1)
  end
end
|
931
|
-
|
932
|
-
# Returns the token handler (a lambda taking the token value) for a given
# token production, dispatched first on the parent production (for
# context-sensitive operators), then on generic token types; nil when no
# handler applies.
def token_productions(parent_production, production)
  case parent_production
  when :_Add_Sub_MultiplicativeExpression_Star
    case production
    when :"+", :"-"
      ->(_token) { add_prod_datum(:AdditiveExpression, production) }
    end
  when :UnaryExpression
    case production
    when :"!", :"+", :"-"
      ->(_token) { add_prod_datum(:UnaryExpression, production) }
    end
  when :NumericLiteralPositive, :NumericLiteralNegative, :NumericLiteral
    case production
    when :"+", :"-"
      ->(_token) { add_prod_datum(:NumericLiteral, production) }
    end
  else
    # Generic tokens that don't depend on a particular production
    case production
    when :a
      ->(_token) { add_prod_datum(:Verb, RDF_TYPE) }
    when :ANON
      ->(_token) { add_prod_datum(:BlankNode, gen_node()) }
    when :ASC, :DESC
      ->(token) { add_prod_datum(:OrderDirection, token.downcase.to_sym) }
    when :BLANK_NODE_LABEL
      ->(token) { add_prod_datum(:BlankNode, gen_node(token)) }
    when :BooleanLiteral
      ->(token) {
        add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.boolean))
      }
    when :BOUND
      ->(_token) { add_prod_datum(:BOUND, :bound) }
    when :DATATYPE
      ->(_token) { add_prod_datum(:BuiltInCall, :datatype) }
    when :DECIMAL
      ->(token) { add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.decimal)) }
    when :DISTINCT, :REDUCED
      ->(token) { add_prod_datum(:DISTINCT_REDUCED, token.downcase.to_sym) }
    when :DOUBLE
      ->(token) { add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.double)) }
    when :INTEGER
      ->(token) { add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.integer)) }
    when :IRI_REF
      ->(token) { add_prod_datum(:iri, uri(token)) }
    when :ISBLANK
      ->(_token) { add_prod_datum(:BuiltInCall, :isBLANK) }
    when :ISLITERAL
      ->(_token) { add_prod_datum(:BuiltInCall, :isLITERAL) }
    when :ISIRI
      ->(_token) { add_prod_datum(:BuiltInCall, :isIRI) }
    when :ISURI
      ->(_token) { add_prod_datum(:BuiltInCall, :isURI) }
    when :LANG
      ->(_token) { add_prod_datum(:BuiltInCall, :lang) }
    when :LANGMATCHES
      ->(_token) { add_prod_datum(:BuiltInCall, :langMatches) }
    when :LANGTAG
      ->(token) { add_prod_datum(:language, token) }
    when :NIL
      ->(_token) { add_prod_datum(:NIL, RDF["nil"]) }
    when :PNAME_LN
      ->(token) { add_prod_datum(:PrefixedName, ns(*token)) }
    when :PNAME_NS
      ->(token) {
        add_prod_datum(:PrefixedName, ns(token, nil)) # [68] PrefixedName ::= PNAME_LN | PNAME_NS
        prod_data[:prefix] = token && token.to_sym    # [4] PrefixDecl := 'PREFIX' PNAME_NS IRI_REF";
      }
    when :STR
      ->(_token) { add_prod_datum(:BuiltInCall, :str) }
    when :SAMETERM
      ->(_token) { add_prod_datum(:BuiltInCall, :sameTerm) }
    when :STRING_LITERAL1, :STRING_LITERAL2, :STRING_LITERAL_LONG1, :STRING_LITERAL_LONG2
      ->(token) { add_prod_datum(:string, token) }
    when :VAR1, :VAR2 # [44] Var ::= VAR1 | VAR2
      ->(token) { add_prod_datum(:Var, variable(token, true)) }
    when :"*", :"/"
      ->(_token) { add_prod_datum(:MultiplicativeExpression, production) }
    when :"=", :"!=", :"<", :">", :"<=", :">="
      ->(_token) { add_prod_datum(:RelationalExpression, production) }
    when :"&&"
      ->(_token) { add_prod_datum(:ConditionalAndExpression, production) }
    when :"||"
      ->(_token) { add_prod_datum(:ConditionalOrExpression, production) }
    end
  end
end
|
1021
|
-
|
1022
|
-
# Handler invoked for each token: dispatches to the handler returned by
# #token_productions for the current parent production, if any.
#
# @param [#to_sym] prod  the token's production
# @param [String, Array] token  the lexed token value
def onToken(prod, token)
  if @productions.empty?
    # BUG FIX: this branch previously interpolated `parentProd`, which is
    # never assigned on this path (always nil); report the token's own
    # production as the error location instead.
    error("#{prod}(:token)", "Token has no parent production", :production => prod)
  else
    parentProd = @productions.last
    token_production = token_productions(parentProd.to_sym, prod.to_sym)
    if token_production
      token_production.call(token)
      progress("#{prod}<#{parentProd}(:token)", "#{token}: #{prod_data}", :depth => (@productions.length + 1))
    else
      progress("#{prod}<#{parentProd}(:token)", token, :depth => (@productions.length + 1))
    end
  end
end
|
1037
|
-
|
1038
|
-
# The production-data element currently on top of the @prod_data stack.
def prod_data
  @prod_data.last
end
|
1040
|
-
|
1041
|
-
##
# Raise a parser Error describing the current production and token.
#
# @param [String] node Location (kept for interface compatibility; the
#   message is built from options[:production])
# @param [String] message Error string
# @param [Hash] options
# @option options [URI, #to_s] :production
# @option options [Token] :token
# @raise [Error] always
def error(node, message, options = {})
  # Removed dead code: a `depth` local and a `node ||= options[:production]`
  # fallback were computed here but never used by the raise below.
  raise Error.new("Error on production #{options[:production].inspect}#{' with input ' + options[:token].inspect if options[:token]} at line #{@lineno}: #{message}", options)
end
|
1052
|
-
|
1053
|
-
##
# Emit a progress line to $stderr while parsing, when the :progress
# option is enabled.
#
# @param [String] node Location
# @param [String] message
# @param [Hash] options
# @option options [Integer] :depth (@productions.length) indent depth
def progress(node, message, options = {})
  # Depth is resolved unconditionally, matching the original evaluation order.
  indent_depth = options[:depth] || @productions.length
  return unless @options[:progress]
  $stderr.puts("[#{@lineno}]#{' ' * indent_depth}#{node}: #{message}")
end
|
1062
|
-
|
1063
|
-
##
# Progress output when debugging.
#
# Accepts any number of message parts (joined with ": "), an optional
# trailing options Hash, and an optional block whose return value is
# appended to the message (allowing lazy evaluation).
#
# When options[:debug] is an Array the formatted line is appended to it;
# any other truthy :debug value sends the line to $stderr. A falsy
# :debug suppresses output entirely.
#
# @param [Array<String>] args message parts, optionally ending in a Hash
# @option options [Boolean, Array] :debug output target
# @option options [Integer] :depth (@productions.length) indent depth
# @yieldreturn [String] appended to the message
def debug(*args)
  options = args.last.is_a?(Hash) ? args.pop : @options
  sink = options[:debug]
  return unless sink

  message = args.join(": ")
  message += yield if block_given?
  depth = options[:depth] || @productions.length
  line = "[#{@lineno}]#{' ' * depth}#{message}"
  if sink.is_a?(Array)
    sink << line
  else
    $stderr.puts(line)
  end
end
|
1106
|
-
|
1107
|
-
# [1] Query ::= Prologue ( SelectQuery | ConstructQuery | DescribeQuery | AskQuery )
#
# Generate an S-Exp for the final query, wrapping it in (prefix ...) and
# (base ...) forms when the prologue declared them.
# Inputs are :BaseDecl, :PrefixDecl, and :query.
def finalize_query(data)
  return unless data[:query]

  query = data[:query].first
  if data[:PrefixDecl]
    query = Algebra::Expression[:prefix, data[:PrefixDecl].first, query]
  end
  if data[:BaseDecl]
    query = Algebra::Expression[:base, data[:BaseDecl].first, query]
  end
  add_prod_datum(:query, query)
end
|
1120
|
-
|
1121
|
-
# [40] Collection ::= '(' GraphNode+ ')'
#
# Take a collection of objects and create an RDF Collection using
# rdf:first, rdf:rest and rdf:nil.
#
# @param [Hash] data Production Data with :pattern, :Collection (the list
#   head node) and :GraphNode (the list elements)
def expand_collection(data)
  # Add any triples generated from deeper productions
  add_prod_datum(:pattern, data[:pattern])

  # Create list items for each element in data[:GraphNode]
  # FIX: removed unused local (`first = col = ...`); `col` was never read.
  first = data[:Collection]
  list = data[:GraphNode].to_a.flatten.compact
  last = list.pop

  list.each do |r|
    add_pattern(:Collection, :subject => first, :predicate => RDF["first"], :object => r)
    rest = gen_node()
    add_pattern(:Collection, :subject => first, :predicate => RDF["rest"], :object => rest)
    first = rest
  end

  if last
    add_pattern(:Collection, :subject => first, :predicate => RDF["first"], :object => last)
  end
  # Terminate the list
  add_pattern(:Collection, :subject => first, :predicate => RDF["rest"], :object => RDF["nil"])
end
|
1146
|
-
|
1147
|
-
# Class-method convenience wrapper around the private #variable, to aid
# in specs.
def self.variable(id, distinguished = true)
  parser = Parser.new
  parser.send(:variable, id, distinguished)
end
|
1151
|
-
|
1152
|
-
# Abbreviate a production URI to the text after its last '#'.
def abbr(prodURI)
  segments = prodURI.to_s.split('#')
  segments.last
end
|
1155
|
-
|
1156
|
-
##
# Consume the next token when it matches the given type or value.
#
# @param [Symbol, String] type_or_value
# @return [Token] the shifted token, or nil when there is no match
def accept(type_or_value)
  head = tokens.first
  tokens.shift if head && head === type_or_value
end
|
1164
|
-
|
1165
|
-
##
# Parser-level failure signal: returns false rather than raising.
# NOTE(review): this shadows Kernel#fail within the parser, so code in
# this class that intends to raise must use `raise` explicitly.
#
# @return [void]
def fail
  false
end
alias_method :fail!, :fail
|
1171
|
-
|
1172
|
-
# Flatten Data in the form :filter => [op+ bgp?] into a [filter-expression,
# query] pair, wrapping multiple operands in an Exprlist when necessary.
# The trailing element is treated as the query iff it responds to #execute.
#
# @return [Array(Object, Object)] expression and (possibly nil) query
def flatten_filter(data)
  query = nil
  query = data.pop if data.last.respond_to?(:execute)
  expr =
    if data.length > 1
      Algebra::Operator::Exprlist.new(*data)
    else
      data.first
    end
  [expr, query]
end
|
1179
|
-
|
1180
|
-
# Merge query modifiers, datasets, and projections into a single algebra
# expression, wrapping the inner query from the inside out:
# order, then project, then distinct/reduced, then slice, then dataset.
#
# @param [Hash] data production data with optional :query, :order, :Var,
#   :DISTINCT_REDUCED, :slice, and :dataset entries
# @return [Object] the composed query expression
def merge_modifiers(data)
  # An absent :query means an empty basic graph pattern
  query = data[:query] ? data[:query].first : Algebra::Operator::BGP.new

  # Add datasets and modifiers in order
  query = Algebra::Expression[:order, data[:order].first, query] if data[:order]

  query = Algebra::Expression[:project, data[:Var], query] if data[:Var] # project

  query = Algebra::Expression[data[:DISTINCT_REDUCED].first, query] if data[:DISTINCT_REDUCED]

  query = Algebra::Expression[:slice, data[:slice][0], data[:slice][1], query] if data[:slice]

  query = Algebra::Expression[:dataset, data[:dataset], query] if data[:dataset]

  query
end
|
1197
|
-
|
1198
|
-
# Add joined expressions for prod1 (op prod2)* to form (op (op 1 2) 3),
# folding pending [op, rhs] pairs left-to-right into binary operations.
def add_operator_expressions(production, data)
  result = data[:Expression]
  pending = data[production]
  until pending.nil? || pending.empty?
    # shift the operator, then the right-hand expression
    result = Algebra::Expression[pending.shift + result + pending.shift]
  end
  add_prod_datum(:Expression, result)
end
|
1207
|
-
|
1208
|
-
# Accumulate joined expressions for prod1 (op prod2)* to form
# (op (op 1 2) 3): pushes this level's [op, expr] pair, followed by any
# pairs accumulated deeper in the parse; with no operator, the expression
# is simply forwarded.
def accumulate_operator_expressions(operator, production, data)
  # No operator seen: forward :Expression unchanged
  return add_prod_datum(:Expression, data[:Expression]) unless data[operator]

  # Add [op, expr] to the stack keyed by `production`
  add_prod_datum(production, [data[operator], data[:Expression]])
  # Then any previously accumulated [op, expr] information
  add_prod_datum(production, data[production])
end
|
1220
|
-
|
1221
|
-
# Add a single value to prod_data; Array values are concatenated
# (non-mutating +=), scalars appended, and nil ignored.
def add_prod_datum(sym, values)
  return if values.nil?

  prod_data[sym] ||= []
  if values.is_a?(Array)
    debug("add_prod_datum(#{sym})") {"#{prod_data[sym].inspect} += #{values.inspect}"}
    prod_data[sym] += values
  else
    debug("add_prod_datum(#{sym})") {"#{prod_data[sym].inspect} << #{values.inspect}"}
    prod_data[sym] << values
  end
end
|
1236
|
-
|
1237
|
-
# Add values to production data, arranged as an array; a call where every
# value is nil is a no-op.
def add_prod_data(sym, *values)
  return if values.all?(&:nil?)

  prod_data[sym] ||= []
  prod_data[sym] = prod_data[sym] + values
  debug("add_prod_data(#{sym})") {"#{prod_data[sym].inspect} += #{values.inspect}"}
end
|
1245
|
-
|
1246
|
-
# Generate a BNode identifier: a non-distinguished variable while inside a
# pattern (@nd_var_gen set), otherwise an RDF::Node, allocating a fresh id
# from :anon_base when none is supplied.
def gen_node(id = nil)
  # Use non-distinguished variables within patterns
  return variable(id, false) if @nd_var_gen

  unless id
    id = @options[:anon_base]
    @options[:anon_base] = @options[:anon_base].succ
  end
  RDF::Node.new(id)
end
|
1259
|
-
|
1260
|
-
##
# Return the variable allocated to an ID. When no ID is provided a new
# variable is allocated (drawing a fresh identifier from @nd_var_gen for
# non-distinguished variables); otherwise any previous assignment for that
# ID is reused via @vars.
#
# The variable's #distinguished? flag records whether this is a
# distinguished or non-distinguished variable; non-distinguished
# variables are effectively the same as BNodes.
# @return [RDF::Query::Variable]
def variable(id, distinguished = true)
  id = nil if id.to_s.empty?

  build = lambda do |name|
    var = RDF::Query::Variable.new(name)
    var.distinguished = distinguished
    var
  end

  if id
    @vars[id] ||= build.call(id)
  else
    unless distinguished
      # Allocate the next non-distinguished variable identifier
      id = @nd_var_gen
      @nd_var_gen = id.succ
    end
    build.call(id)
  end
end
|
1289
|
-
|
1290
|
-
# Create an RDF::URI from +value+, resolving it against base_uri when one
# is set. Unless :resolve_uris is given, the original (relative) form is
# kept as the URI's lexical representation for serialization.
def uri(value)
  return RDF::URI(value) unless self.base_uri

  resolved = self.base_uri.join(value.to_s)
  unless resolved.to_s == value.to_s || options[:resolve_uris]
    resolved.lexical = "<#{value}>"
  end
  resolved
end
|
1306
|
-
|
1307
|
-
# Expand a prefixed name (prefix:suffix) into a URI, recording the
# prefixed form as the URI's lexical representation unless :resolve_uris.
def ns(prefix, suffix)
  base = prefix(prefix).to_s
  # Avoid a doubled '#' when the namespace already carries a fragment
  suffix = suffix.to_s.sub(/^\#/, "") if base.index("#")
  debug {"ns(#{prefix.inspect}): base: '#{base}', suffix: '#{suffix}'"}
  result = uri(base + suffix.to_s)
  # Cause URI to be serialized as a lexical
  result.lexical = "#{prefix}:#{suffix}" unless options[:resolve_uris]
  result
end
|
1316
|
-
|
1317
|
-
# Add a triple pattern to production data.
#
# Single-element Array values are unwrapped; in validating mode, every
# component must be an RDF::Term or an error is raised.
#
# @param [String] production Production generating pattern
# @param [Hash{Symbol => Object}] options subject/predicate/object terms
def add_pattern(production, options)
  progress(production, "add_pattern: #{options.inspect}")
  progress(production, "[:pattern, #{options[:subject]}, #{options[:predicate]}, #{options[:object]}]")
  triple = options.each_with_object({}) do |(role, value), acc|
    value = value.flatten.first if value.is_a?(Array) && value.flatten.length == 1
    if validate? && !value.is_a?(RDF::Term)
      error("add_pattern", "Expected #{role} to be a resource, but it was #{value.inspect}",
        :production => production)
    end
    acc[role] = value
  end
  add_prod_datum(:pattern, RDF::Query::Pattern.new(triple))
end
|
1337
|
-
|
1338
|
-
instance_methods.each { |method| public method } # DEBUG
|
1339
|
-
|
1340
|
-
public
|
1341
|
-
##
# Raised for errors during parsing.
#
# @example Raising a parser error
#   raise SPARQL::Grammar::Parser::Error.new(
#     "FIXME on line 10",
#     :input => query, :production => '%', :lineno => 9)
#
# @see http://ruby-doc.org/core/classes/StandardError.html
class Error < StandardError
  # @return [String] the input string associated with the error
  attr_reader :input

  # @return [String] the grammar production where the error was found
  attr_reader :production

  # @return [Integer] the line number where the error occurred
  attr_reader :lineno

  # @return [Integer] position within the line of the error
  attr_reader :position

  ##
  # Initializes a new parser error instance.
  #
  # @param [String, #to_s] message
  # @param [Hash{Symbol => Object}] options
  # @option options [String] :input (nil)
  # @option options [String] :production (nil)
  # @option options [Integer] :lineno (nil)
  # @option options [Integer] :position (nil)
  def initialize(message, options = {})
    @input      = options[:input]
    @production = options[:production]
    @lineno     = options[:lineno]
    @position   = options[:position]
    super(message.to_s)
  end
end # class Error
|
1392
|
-
end # class Parser
|
1393
|
-
end; end # module SPARQL::Grammar
|