sparql 1.0.2 → 1.0.3
This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- data/{README.markdown → README.md} +22 -5
- data/VERSION +1 -1
- data/lib/sparql.rb +1 -2
- data/lib/sparql/algebra/operator.rb +2 -2
- data/lib/sparql/algebra/operator/prefix.rb +8 -0
- data/lib/sparql/grammar.rb +2 -4
- data/lib/sparql/grammar/meta.rb +28543 -0
- data/lib/sparql/grammar/parser11.rb +1304 -0
- data/lib/sparql/grammar/terminals11.rb +186 -0
- metadata +22 -6
- data/lib/sparql/grammar/lexer.rb +0 -613
- data/lib/sparql/grammar/parser.rb +0 -1393
- data/lib/sparql/grammar/parser/meta.rb +0 -1805
@@ -0,0 +1,1304 @@
|
|
1
|
+
require 'ebnf'
|
2
|
+
require 'ebnf/ll1/parser'
|
3
|
+
require 'sparql/grammar/meta'
|
4
|
+
|
5
|
+
module SPARQL::Grammar
|
6
|
+
##
|
7
|
+
# A parser for the SPARQL 1.1 grammar.
|
8
|
+
#
|
9
|
+
# @see http://www.w3.org/TR/rdf-sparql-query/#grammar
|
10
|
+
# @see http://en.wikipedia.org/wiki/LR_parser
|
11
|
+
class Parser
|
12
|
+
include SPARQL::Grammar::Meta
|
13
|
+
include SPARQL::Grammar::Terminals
|
14
|
+
include EBNF::LL1::Parser
|
15
|
+
|
16
|
+
# Builtin functions
|
17
|
+
BUILTINS = %w{
|
18
|
+
ABS BNODE CEIL COALESCE CONCAT
|
19
|
+
CONTAINS DATATYPE DAY ENCODE_FOR_URI EXISTS
|
20
|
+
FLOOR HOURS IF IRI LANGMATCHES LANG LCASE
|
21
|
+
MD5 MINUTES MONTH NOW RAND ROUND SECONDS
|
22
|
+
SHA1 SHA224 SHA256 SHA384 SHA512
|
23
|
+
STRDT STRENDS STRLANG STRLEN STRSTARTS SUBSTR STR
|
24
|
+
TIMEZONE TZ UCASE URI YEAR
|
25
|
+
isBLANK isIRI isURI isLITERAL isNUMERIC sameTerm
|
26
|
+
}.map {|s| s.downcase.to_sym}.freeze
|
27
|
+
|
28
|
+
BUILTIN_RULES = [:regex, :substr, :exists, :not_exists].freeze
|
29
|
+
|
30
|
+
##
|
31
|
+
# Any additional options for the parser.
|
32
|
+
#
|
33
|
+
# @return [Hash]
|
34
|
+
attr_reader :options
|
35
|
+
|
36
|
+
##
|
37
|
+
# The current input string being processed.
|
38
|
+
#
|
39
|
+
# @return [String]
|
40
|
+
attr_accessor :input
|
41
|
+
|
42
|
+
##
|
43
|
+
# The current input tokens being processed.
|
44
|
+
#
|
45
|
+
# @return [Array<Token>]
|
46
|
+
attr_reader :tokens
|
47
|
+
|
48
|
+
##
|
49
|
+
# The internal representation of the result using hierarch of RDF objects and SPARQL::Algebra::Operator
|
50
|
+
# objects.
|
51
|
+
# @return [Array]
|
52
|
+
# @see http://sparql.rubyforge.org/algebra
|
53
|
+
attr_accessor :result
|
54
|
+
|
55
|
+
# Terminals passed to lexer. Order matters!
|
56
|
+
terminal(:ANON, ANON) do |prod, token, input|
|
57
|
+
add_prod_datum(:BlankNode, bnode)
|
58
|
+
end
|
59
|
+
terminal(:NIL, NIL) do |prod, token, input|
|
60
|
+
add_prod_datum(:NIL, RDF['nil'])
|
61
|
+
end
|
62
|
+
terminal(:BLANK_NODE_LABEL, BLANK_NODE_LABEL) do |prod, token, input|
|
63
|
+
add_prod_datum(:BlankNode, bnode(token.value[2..-1]))
|
64
|
+
end
|
65
|
+
terminal(:IRIREF, IRIREF, :unescape => true) do |prod, token, input|
|
66
|
+
begin
|
67
|
+
add_prod_datum(:iri, iri(token.value[1..-2]))
|
68
|
+
rescue ArgumentError => e
|
69
|
+
raise Error, e.message
|
70
|
+
end
|
71
|
+
end
|
72
|
+
terminal(:DOUBLE_POSITIVE, DOUBLE_POSITIVE) do |prod, token, input|
|
73
|
+
# Note that a Turtle Double may begin with a '.[eE]', so tack on a leading
|
74
|
+
# zero if necessary
|
75
|
+
value = token.value.sub(/\.([eE])/, '.0\1')
|
76
|
+
add_prod_datum(:literal, literal(value, :datatype => RDF::XSD.double))
|
77
|
+
end
|
78
|
+
terminal(:DECIMAL_POSITIVE, DECIMAL_POSITIVE) do |prod, token, input|
|
79
|
+
# Note that a Turtle Decimal may begin with a '.', so tack on a leading
|
80
|
+
# zero if necessary
|
81
|
+
value = token.value
|
82
|
+
value = "0#{token.value}" if token.value[0,1] == "."
|
83
|
+
add_prod_datum(:literal, literal(value, :datatype => RDF::XSD.decimal))
|
84
|
+
end
|
85
|
+
terminal(:INTEGER_POSITIVE, INTEGER_POSITIVE) do |prod, token, input|
|
86
|
+
add_prod_datum(:literal, literal(token.value, :datatype => RDF::XSD.integer))
|
87
|
+
end
|
88
|
+
terminal(:DOUBLE_NEGATIVE, DOUBLE_NEGATIVE) do |prod, token, input|
|
89
|
+
# Note that a Turtle Double may begin with a '.[eE]', so tack on a leading
|
90
|
+
# zero if necessary
|
91
|
+
value = token.value.sub(/\.([eE])/, '.0\1')
|
92
|
+
add_prod_datum(:literal, literal(value, :datatype => RDF::XSD.double))
|
93
|
+
end
|
94
|
+
terminal(:DECIMAL_NEGATIVE, DECIMAL_NEGATIVE) do |prod, token, input|
|
95
|
+
# Note that a Turtle Decimal may begin with a '.', so tack on a leading
|
96
|
+
# zero if necessary
|
97
|
+
value = token.value
|
98
|
+
value = "0#{token.value}" if token.value[0,1] == "."
|
99
|
+
add_prod_datum(:literal, literal(value, :datatype => RDF::XSD.decimal))
|
100
|
+
end
|
101
|
+
terminal(:INTEGER_NEGATIVE, INTEGER_NEGATIVE) do |prod, token, input|
|
102
|
+
add_prod_datum(:resource, literal(token.value, :datatype => RDF::XSD.integer))
|
103
|
+
end
|
104
|
+
terminal(:DOUBLE, DOUBLE) do |prod, token, input|
|
105
|
+
# Note that a Turtle Double may begin with a '.[eE]', so tack on a leading
|
106
|
+
# zero if necessary
|
107
|
+
value = token.value.sub(/\.([eE])/, '.0\1')
|
108
|
+
add_prod_datum(:literal, literal(value, :datatype => RDF::XSD.double))
|
109
|
+
end
|
110
|
+
terminal(:DECIMAL, DECIMAL) do |prod, token, input|
|
111
|
+
# Note that a Turtle Decimal may begin with a '.', so tack on a leading
|
112
|
+
# zero if necessary
|
113
|
+
value = token.value
|
114
|
+
#value = "0#{token.value}" if token.value[0,1] == "."
|
115
|
+
add_prod_datum(:literal, literal(value, :datatype => RDF::XSD.decimal))
|
116
|
+
end
|
117
|
+
terminal(:INTEGER, INTEGER) do |prod, token, input|
|
118
|
+
add_prod_datum(:literal, literal(token.value, :datatype => RDF::XSD.integer))
|
119
|
+
end
|
120
|
+
terminal(:LANGTAG, LANGTAG) do |prod, token, input|
|
121
|
+
add_prod_datum(:language, token.value[1..-1])
|
122
|
+
end
|
123
|
+
terminal(:PNAME_LN, PNAME_LN, :unescape => true) do |prod, token, input|
|
124
|
+
prefix, suffix = token.value.split(":", 2)
|
125
|
+
add_prod_datum(:PrefixedName, ns(prefix, suffix))
|
126
|
+
end
|
127
|
+
terminal(:PNAME_NS, PNAME_NS) do |prod, token, input|
|
128
|
+
prefix = token.value[0..-2]
|
129
|
+
# [68] PrefixedName ::= PNAME_LN | PNAME_NS
|
130
|
+
add_prod_datum(:PrefixedName, ns(prefix, nil))
|
131
|
+
# [4] PrefixDecl := 'PREFIX' PNAME_NS IRI_REF";
|
132
|
+
add_prod_datum(:prefix, prefix && prefix.to_sym)
|
133
|
+
end
|
134
|
+
terminal(:STRING_LITERAL_LONG1, STRING_LITERAL_LONG1, :unescape => true) do |prod, token, input|
|
135
|
+
add_prod_datum(:string, token.value[3..-4])
|
136
|
+
end
|
137
|
+
terminal(:STRING_LITERAL_LONG2, STRING_LITERAL_LONG2, :unescape => true) do |prod, token, input|
|
138
|
+
add_prod_datum(:string, token.value[3..-4])
|
139
|
+
end
|
140
|
+
terminal(:STRING_LITERAL1, STRING_LITERAL1, :unescape => true) do |prod, token, input|
|
141
|
+
add_prod_datum(:string, token.value[1..-2])
|
142
|
+
end
|
143
|
+
terminal(:STRING_LITERAL2, STRING_LITERAL2, :unescape => true) do |prod, token, input|
|
144
|
+
add_prod_datum(:string, token.value[1..-2])
|
145
|
+
end
|
146
|
+
terminal(:VAR1, VAR1) do |prod, token, input|
|
147
|
+
add_prod_datum(:Var, variable(token.value[1..-1]))
|
148
|
+
end
|
149
|
+
terminal(:VAR2, VAR2) do |prod, token, input|
|
150
|
+
add_prod_datum(:Var, variable(token.value[1..-1]))
|
151
|
+
end
|
152
|
+
|
153
|
+
# Keyword terminals
|
154
|
+
terminal(nil, STR_EXPR, :map => STR_MAP) do |prod, token, input|
|
155
|
+
case token.value
|
156
|
+
when '+', '-'
|
157
|
+
case prod
|
158
|
+
when :_AdditiveExpression_1, :_AdditiveExpression_4, :_AdditiveExpression_5
|
159
|
+
add_prod_datum(:AdditiveExpression, token.value)
|
160
|
+
when :_UnaryExpression_2, :_UnaryExpression_3
|
161
|
+
add_prod_datum(:UnaryExpression, token.value)
|
162
|
+
else
|
163
|
+
raise "Unexpected production #{prod} for #{token}"
|
164
|
+
end
|
165
|
+
when '*', '/' then add_prod_datum(:MultiplicativeExpression, token.value)
|
166
|
+
when '=', '!=', '<',
|
167
|
+
'>', '<=', '>=' then add_prod_datum(:RelationalExpression, token.value)
|
168
|
+
when '&&' then add_prod_datum(:ConditionalAndExpression, token.value)
|
169
|
+
when '||' then add_prod_datum(:ConditionalOrExpression, token.value)
|
170
|
+
when '!' then add_prod_datum(:UnaryExpression, token.value)
|
171
|
+
when 'a' then add_prod_datum(:Verb, RDF.type)
|
172
|
+
when /true|false/ then add_prod_datum(:literal, RDF::Literal::Boolean.new(token.value.downcase))
|
173
|
+
when /ASC|DESC/ then add_prod_datum(:OrderDirection, token.value.downcase.to_sym)
|
174
|
+
when /DISTINCT|REDUCED/ then add_prod_datum(:DISTINCT_REDUCED, token.value.downcase.to_sym)
|
175
|
+
when %r{
|
176
|
+
ABS|BNODE|BOUND|CEIL|COALESCE|CONCAT
|
177
|
+
|CONTAINS|DATATYPE|DAY|ENCODE_FOR_URI|EXISTS
|
178
|
+
|FLOOR|HOURS|IF|IRI|LANGMATCHES|LANG|LCASE
|
179
|
+
|MD5|MINUTES|MONTH|NOW|RAND|ROUND|SECONDS
|
180
|
+
|SHA1|SHA224|SHA256|SHA384|SHA512
|
181
|
+
|STRDT|STRENDS|STRLANG|STRLEN|STRSTARTS|SUBSTR|STR
|
182
|
+
|TIMEZONE|TZ|UCASE|URI|YEAR
|
183
|
+
|isBLANK|isIRI|isURI|isLITERAL|isNUMERIC|sameTerm
|
184
|
+
}x
|
185
|
+
add_prod_datum(token.value.downcase.to_sym, token.value.downcase.to_sym)
|
186
|
+
else
|
187
|
+
#add_prod_datum(:string, token.value)
|
188
|
+
end
|
189
|
+
end
|
190
|
+
|
191
|
+
# Productions
|
192
|
+
# [2] Query ::= Prologue
|
193
|
+
# ( SelectQuery | ConstructQuery | DescribeQuery | AskQuery ) BindingsClause
|
194
|
+
production(:Query) do |input, data, callback|
|
195
|
+
if data[:query]
|
196
|
+
query = data[:query].first
|
197
|
+
if data[:PrefixDecl]
|
198
|
+
pfx = data[:PrefixDecl].shift
|
199
|
+
data[:PrefixDecl].each {|p| pfx.merge!(p)}
|
200
|
+
pfx.operands[1] = query
|
201
|
+
query = pfx
|
202
|
+
end
|
203
|
+
query = SPARQL::Algebra::Expression[:base, data[:BaseDecl].first, query] if data[:BaseDecl]
|
204
|
+
add_prod_datum(:query, query)
|
205
|
+
end
|
206
|
+
end
|
207
|
+
|
208
|
+
# [4] Prologue ::= ( BaseDecl | PrefixDecl )*
|
209
|
+
production(:Prologue) do |input, data, callback|
|
210
|
+
unless resolve_iris?
|
211
|
+
# Only output if we're not resolving URIs internally
|
212
|
+
add_prod_datum(:BaseDecl, data[:BaseDecl])
|
213
|
+
add_prod_datum(:PrefixDecl, data[:PrefixDecl]) if data[:PrefixDecl]
|
214
|
+
end
|
215
|
+
end
|
216
|
+
|
217
|
+
# [5] BaseDecl ::= 'BASE' IRI_REF
|
218
|
+
production(:BaseDecl) do |input, data, callback|
|
219
|
+
iri = data[:iri].last
|
220
|
+
debug("BaseDecl") {"Defined base as #{iri}"}
|
221
|
+
self.base_uri = iri(iri)
|
222
|
+
add_prod_datum(:BaseDecl, iri) unless resolve_iris?
|
223
|
+
end
|
224
|
+
|
225
|
+
# [6] PrefixDecl ::= 'PREFIX' PNAME_NS IRI_REF
|
226
|
+
production(:PrefixDecl) do |input, data, callback|
|
227
|
+
if data[:iri]
|
228
|
+
pfx = data[:prefix].last
|
229
|
+
self.prefix(pfx, data[:iri].last)
|
230
|
+
prefix_op = SPARQL::Algebra::Operator::Prefix.new([["#{pfx}:".to_sym, data[:iri].last]], [])
|
231
|
+
add_prod_datum(:PrefixDecl, prefix_op)
|
232
|
+
end
|
233
|
+
end
|
234
|
+
|
235
|
+
# [7] SelectQuery ::= SelectClause DatasetClause* WhereClause SolutionModifier
|
236
|
+
production(:SelectQuery) do |input, data, callback|
|
237
|
+
query = merge_modifiers(data)
|
238
|
+
add_prod_datum :query, query
|
239
|
+
end
|
240
|
+
|
241
|
+
# [10] ConstructQuery ::= 'CONSTRUCT'
|
242
|
+
# ( ConstructTemplate DatasetClause* WhereClause SolutionModifier | DatasetClause* 'WHERE' '{' TriplesTemplate? '}' SolutionModifier )
|
243
|
+
production(:ConstructQuery) do |input, data, callback|
|
244
|
+
query = merge_modifiers(data)
|
245
|
+
template = data[:ConstructTemplate] || []
|
246
|
+
add_prod_datum :query, SPARQL::Algebra::Expression[:construct, template, query]
|
247
|
+
end
|
248
|
+
|
249
|
+
# [11] DescribeQuery ::= 'DESCRIBE' ( VarOrIRIref+ | '*' )
|
250
|
+
# DatasetClause* WhereClause? SolutionModifier
|
251
|
+
production(:DescribeQuery) do |input, data, callback|
|
252
|
+
query = merge_modifiers(data)
|
253
|
+
to_describe = data[:VarOrIRIref] || []
|
254
|
+
add_prod_datum :query, SPARQL::Algebra::Expression[:describe, to_describe, query]
|
255
|
+
end
|
256
|
+
|
257
|
+
# [12] AskQuery ::= 'ASK' DatasetClause* WhereClause
|
258
|
+
production(:AskQuery) do |input, data, callback|
|
259
|
+
query = merge_modifiers(data)
|
260
|
+
add_prod_datum :query, SPARQL::Algebra::Expression[:ask, query]
|
261
|
+
end
|
262
|
+
|
263
|
+
# [14] DefaultGraphClause ::= SourceSelector
|
264
|
+
production(:DefaultGraphClause) do |input, data, callback|
|
265
|
+
add_prod_datum :dataset, data[:IRIref]
|
266
|
+
end
|
267
|
+
|
268
|
+
# [15] NamedGraphClause ::= 'NAMED' SourceSelector
|
269
|
+
production(:NamedGraphClause) do |input, data, callback|
|
270
|
+
add_prod_data :dataset, data[:IRIref].unshift(:named)
|
271
|
+
end
|
272
|
+
|
273
|
+
# [18] SolutionModifier ::= GroupClause? HavingClause? OrderClause? LimitOffsetClauses?
|
274
|
+
|
275
|
+
# [19] GroupClause ::= 'GROUP' 'BY' GroupCondition+
|
276
|
+
#production(:GroupClause) do |input, data, callback|
|
277
|
+
#end
|
278
|
+
|
279
|
+
# [20] GroupCondition ::= BuiltInCall | FunctionCall
|
280
|
+
# | '(' Expression ( 'AS' Var )? ')' | Var
|
281
|
+
#production(:GroupClause) do |input, data, callback|
|
282
|
+
#end
|
283
|
+
|
284
|
+
# [21] HavingClause ::= 'HAVING' HavingCondition+
|
285
|
+
#production(:GroupClause) do |input, data, callback|
|
286
|
+
#end
|
287
|
+
|
288
|
+
# [23] OrderClause ::= 'ORDER' 'BY' OrderCondition+
|
289
|
+
production(:OrderClause) do |input, data, callback|
|
290
|
+
if res = data[:OrderCondition]
|
291
|
+
res = [res] if [:asc, :desc].include?(res[0]) # Special case when there's only one condition and it's ASC (x) or DESC (x)
|
292
|
+
add_prod_data :order, res
|
293
|
+
end
|
294
|
+
end
|
295
|
+
|
296
|
+
# [24] OrderCondition ::= ( ( 'ASC' | 'DESC' )
|
297
|
+
# BrackettedExpression )
|
298
|
+
# | ( Constraint | Var )
|
299
|
+
production(:OrderCondition) do |input, data, callback|
|
300
|
+
if data[:OrderDirection]
|
301
|
+
add_prod_datum(:OrderCondition, SPARQL::Algebra::Expression.for(data[:OrderDirection] + data[:Expression]))
|
302
|
+
else
|
303
|
+
add_prod_datum(:OrderCondition, data[:Constraint] || data[:Var])
|
304
|
+
end
|
305
|
+
end
|
306
|
+
|
307
|
+
# [25] LimitOffsetClauses ::= LimitClause OffsetClause?
|
308
|
+
# | OffsetClause LimitClause?
|
309
|
+
production(:LimitOffsetClauses) do |input, data, callback|
|
310
|
+
if data[:limit] || data[:offset]
|
311
|
+
limit = data[:limit] ? data[:limit].last : :_
|
312
|
+
offset = data[:offset] ? data[:offset].last : :_
|
313
|
+
add_prod_data :slice, offset, limit
|
314
|
+
end
|
315
|
+
end
|
316
|
+
|
317
|
+
# [26] LimitClause ::= 'LIMIT' INTEGER
|
318
|
+
production(:LimitClause) do |input, data, callback|
|
319
|
+
add_prod_datum(:limit, data[:literal])
|
320
|
+
end
|
321
|
+
|
322
|
+
# [27] OffsetClause ::= 'OFFSET' INTEGER
|
323
|
+
production(:OffsetClause) do |input, data, callback|
|
324
|
+
add_prod_datum(:offset, data[:literal])
|
325
|
+
end
|
326
|
+
|
327
|
+
# [54] [55] GroupGraphPatternSub ::= TriplesBlock?
|
328
|
+
# ( GraphPatternNotTriples '.'? TriplesBlock? )*
|
329
|
+
production(:GroupGraphPatternSub) do |input, data, callback|
|
330
|
+
query_list = data[:query_list]
|
331
|
+
debug("GroupGraphPatternSub") {"ql #{query_list.to_a.inspect}"}
|
332
|
+
debug("GroupGraphPatternSub") {"q #{data[:query] ? data[:query].first.inspect : 'nil'}"}
|
333
|
+
|
334
|
+
if query_list
|
335
|
+
lhs = data[:query].to_a.first
|
336
|
+
while !query_list.empty?
|
337
|
+
rhs = query_list.shift
|
338
|
+
# Make the right-hand-side a Join with only a single operand, if it's not already and Operator
|
339
|
+
rhs = SPARQL::Algebra::Expression.for(:join, :placeholder, rhs) unless rhs.is_a?(SPARQL::Algebra::Operator)
|
340
|
+
debug("GroupGraphPatternSub") {"<= q: #{rhs.inspect}"}
|
341
|
+
debug("GroupGraphPatternSub") {"<= lhs: #{lhs ? lhs.inspect : 'nil'}"}
|
342
|
+
lhs ||= SPARQL::Algebra::Operator::BGP.new if rhs.is_a?(SPARQL::Algebra::Operator::LeftJoin)
|
343
|
+
if lhs
|
344
|
+
if rhs.operand(0) == :placeholder
|
345
|
+
rhs.operands[0] = lhs
|
346
|
+
else
|
347
|
+
rhs = SPARQL::Algebra::Operator::Join.new(lhs, rhs)
|
348
|
+
end
|
349
|
+
end
|
350
|
+
lhs = rhs
|
351
|
+
lhs = lhs.operand(1) if lhs.operand(0) == :placeholder
|
352
|
+
debug("GroupGraphPatternSub(itr)") {"=> lhs: #{lhs.inspect}"}
|
353
|
+
end
|
354
|
+
# Trivial simplification for :join or :union of one query
|
355
|
+
case lhs
|
356
|
+
when SPARQL::Algebra::Operator::Join, SPARQL::Algebra::Operator::Union
|
357
|
+
if lhs.operand(0) == :placeholder
|
358
|
+
lhs = lhs.operand(1)
|
359
|
+
debug("GroupGraphPatternSub(simplify)") {"=> lhs: #{lhs.inspect}"}
|
360
|
+
end
|
361
|
+
end
|
362
|
+
res = lhs
|
363
|
+
elsif data[:query]
|
364
|
+
res = data[:query].first
|
365
|
+
end
|
366
|
+
|
367
|
+
debug("GroupGraphPatternSub(pre-filter)") {"res: #{res.inspect}"}
|
368
|
+
|
369
|
+
if data[:filter]
|
370
|
+
expr, query = flatten_filter(data[:filter])
|
371
|
+
query = res || SPARQL::Algebra::Operator::BGP.new
|
372
|
+
# query should be nil
|
373
|
+
res = SPARQL::Algebra::Operator::Filter.new(expr, query)
|
374
|
+
end
|
375
|
+
add_prod_datum(:query, res)
|
376
|
+
end
|
377
|
+
|
378
|
+
# _GroupGraphPatternSub_2 ::= ( GraphPatternNotTriples '.'? TriplesBlock? )
|
379
|
+
# Create a stack of GroupQuerys having a single graph element and resolve in GroupGraphPattern
|
380
|
+
production(:_GroupGraphPatternSub_2) do |input, data, callback|
|
381
|
+
lhs = data[:query_list]
|
382
|
+
[data[:query]].flatten.compact.each do |rhs|
|
383
|
+
rhs = SPARQL::Algebra::Expression.for(:join, :placeholder, rhs) if rhs.is_a?(RDF::Query)
|
384
|
+
add_prod_data(:query_list, rhs)
|
385
|
+
end
|
386
|
+
add_prod_datum(:query_list, lhs) if lhs
|
387
|
+
add_prod_datum(:filter, data[:filter])
|
388
|
+
end
|
389
|
+
|
390
|
+
# _GroupGraphPatternSub_3
|
391
|
+
|
392
|
+
# [56] TriplesBlock ::= TriplesSameSubjectPath
|
393
|
+
# ( '.' TriplesBlock? )?
|
394
|
+
production(:TriplesBlock) do |input, data, callback|
|
395
|
+
query = SPARQL::Algebra::Operator::BGP.new
|
396
|
+
data[:pattern].each {|p| query << p}
|
397
|
+
|
398
|
+
# Append triples from ('.' TriplesBlock? )?
|
399
|
+
data[:query].to_a.each {|q| query += q}
|
400
|
+
add_prod_datum(:query, query)
|
401
|
+
end
|
402
|
+
|
403
|
+
# [57] GraphPatternNotTriples ::= GroupOrUnionGraphPattern
|
404
|
+
# | OptionalGraphPattern
|
405
|
+
# | MinusGraphPattern
|
406
|
+
# | GraphGraphPattern
|
407
|
+
# | ServiceGraphPattern
|
408
|
+
# | Filter | Bind
|
409
|
+
production(:GraphPatternNotTriples) do |input, data, callback|
|
410
|
+
add_prod_datum(:filter, data[:filter])
|
411
|
+
|
412
|
+
if data[:query]
|
413
|
+
res = data[:query].to_a.first
|
414
|
+
# FIXME?
|
415
|
+
#res = SPARQL::Algebra::Expression.for(:join, :placeholder, res) unless res.is_a?(SPARQL::Algebra::Operator)
|
416
|
+
add_prod_data(:query, res)
|
417
|
+
end
|
418
|
+
end
|
419
|
+
|
420
|
+
# [58] OptionalGraphPattern ::= 'OPTIONAL' GroupGraphPattern
|
421
|
+
production(:OptionalGraphPattern) do |input, data, callback|
|
422
|
+
if data[:query]
|
423
|
+
expr = nil
|
424
|
+
query = data[:query].first
|
425
|
+
if query.is_a?(SPARQL::Algebra::Operator::Filter)
|
426
|
+
# Change to expression on left-join with query element
|
427
|
+
expr, query = query.operands
|
428
|
+
add_prod_data(:query, SPARQL::Algebra::Expression.for(:leftjoin, :placeholder, query, expr))
|
429
|
+
else
|
430
|
+
add_prod_data(:query, SPARQL::Algebra::Expression.for(:leftjoin, :placeholder, query))
|
431
|
+
end
|
432
|
+
end
|
433
|
+
end
|
434
|
+
|
435
|
+
# [59] GraphGraphPattern ::= 'GRAPH' VarOrIRIref GroupGraphPattern
|
436
|
+
production(:GraphGraphPattern) do |input, data, callback|
|
437
|
+
if data[:query]
|
438
|
+
name = (data[:VarOrIRIref]).last
|
439
|
+
bgp = data[:query].first
|
440
|
+
if name
|
441
|
+
add_prod_data(:query, SPARQL::Algebra::Expression.for(:graph, name, bgp))
|
442
|
+
else
|
443
|
+
add_prod_data(:query, bgp)
|
444
|
+
end
|
445
|
+
end
|
446
|
+
end
|
447
|
+
|
448
|
+
# [63] GroupOrUnionGraphPattern ::= GroupGraphPattern
|
449
|
+
# ( 'UNION' GroupGraphPattern )*
|
450
|
+
production(:GroupOrUnionGraphPattern) do |input, data, callback|
|
451
|
+
res = data[:query].to_a.first
|
452
|
+
if data[:union]
|
453
|
+
while !data[:union].empty?
|
454
|
+
# Join union patterns together as Union operators
|
455
|
+
#puts "res: res: #{res}, input_prod: #{input_prod}, data[:union]: #{data[:union].first}"
|
456
|
+
lhs = res
|
457
|
+
rhs = data[:union].shift
|
458
|
+
res = SPARQL::Algebra::Expression.for(:union, lhs, rhs)
|
459
|
+
end
|
460
|
+
end
|
461
|
+
add_prod_datum(:query, res)
|
462
|
+
end
|
463
|
+
|
464
|
+
# ( 'UNION' GroupGraphPattern )*
|
465
|
+
production(:_GroupOrUnionGraphPattern_1) do |input, data, callback|
|
466
|
+
# Add [:union rhs] to stack based on ":union"
|
467
|
+
add_prod_data(:union, data[:query].to_a.first)
|
468
|
+
add_prod_data(:union, data[:union].first) if data[:union]
|
469
|
+
end
|
470
|
+
|
471
|
+
# [64] Filter ::= 'FILTER' Constraint
|
472
|
+
production(:Filter) do |input, data, callback|
|
473
|
+
add_prod_datum(:filter, data[:Constraint])
|
474
|
+
end
|
475
|
+
|
476
|
+
# [65] Constraint ::= BrackettedExpression | BuiltInCall
|
477
|
+
# | FunctionCall
|
478
|
+
production(:Constraint) do |input, data, callback|
|
479
|
+
if data[:Expression]
|
480
|
+
# Resolve expression to the point it is either an atom or an s-exp
|
481
|
+
add_prod_data(:Constraint, data[:Expression].to_a.first)
|
482
|
+
elsif data[:BuiltInCall]
|
483
|
+
add_prod_datum(:Constraint, data[:BuiltInCall])
|
484
|
+
elsif data[:Function]
|
485
|
+
add_prod_datum(:Constraint, data[:Function])
|
486
|
+
end
|
487
|
+
end
|
488
|
+
|
489
|
+
# [66] FunctionCall ::= IRIref ArgList
|
490
|
+
production(:FunctionCall) do |input, data, callback|
|
491
|
+
add_prod_data(:Function, data[:IRIref] + data[:ArgList])
|
492
|
+
end
|
493
|
+
|
494
|
+
# [67] ArgList ::= NIL
|
495
|
+
# | '(' 'DISTINCT'? Expression ( ',' Expression )* ')'
|
496
|
+
production(:ArgList) do |input, data, callback|
|
497
|
+
data.values.each {|v| add_prod_datum(:ArgList, v)}
|
498
|
+
end
|
499
|
+
|
500
|
+
# [68] ExpressionList ::= NIL
|
501
|
+
# | '(' Expression ( ',' Expression )* ')'
|
502
|
+
production(:ExpressionList) do |input, data, callback|
|
503
|
+
data.values.each {|v| add_prod_datum(:ExpressionList, v)}
|
504
|
+
end
|
505
|
+
|
506
|
+
# [69] ConstructTemplate ::= '{' ConstructTriples? '}'
|
507
|
+
start_production(:ConstructTemplate) do |input, data, callback|
|
508
|
+
# Generate BNodes instead of non-distinguished variables
|
509
|
+
self.nd_var_gen = false
|
510
|
+
end
|
511
|
+
production(:ConstructTemplate) do |input, data, callback|
|
512
|
+
# Generate BNodes instead of non-distinguished variables
|
513
|
+
self.nd_var_gen = "0"
|
514
|
+
add_prod_datum(:ConstructTemplate, data[:pattern])
|
515
|
+
add_prod_datum(:ConstructTemplate, data[:ConstructTemplate])
|
516
|
+
end
|
517
|
+
|
518
|
+
# [71] TriplesSameSubject ::= VarOrTerm PropertyListNotEmpty
|
519
|
+
# | TriplesNode PropertyList
|
520
|
+
production(:TriplesSameSubject) do |input, data, callback|
|
521
|
+
add_prod_datum(:pattern, data[:pattern])
|
522
|
+
end
|
523
|
+
|
524
|
+
# [72] PropertyListNotEmpty ::= Verb ObjectList
|
525
|
+
# ( ';' ( Verb ObjectList )? )*
|
526
|
+
start_production(:PropertyListNotEmpty) do |input, data, callback|
|
527
|
+
subject = input[:VarOrTerm] || input[:TriplesNode] || input[:GraphNode]
|
528
|
+
error(nil, "Expected VarOrTerm or TriplesNode or GraphNode", :production => :PropertyListNotEmpty) if validate? && !subject
|
529
|
+
data[:Subject] = subject
|
530
|
+
end
|
531
|
+
production(:PropertyListNotEmpty) do |input, data, callback|
|
532
|
+
add_prod_datum(:pattern, data[:pattern])
|
533
|
+
end
|
534
|
+
|
535
|
+
# [74] ObjectList ::= Object ( ',' Object )*
|
536
|
+
start_production(:ObjectList) do |input, data, callback|
|
537
|
+
# Called after Verb. The prod_data stack should have Subject and Verb elements
|
538
|
+
data[:Subject] = prod_data[:Subject]
|
539
|
+
error(nil, "Expected Subject", :production => :ObjectList) if !prod_data[:Subject] && validate?
|
540
|
+
error(nil, "Expected Verb", :production => :ObjectList) if !prod_data[:Verb] && validate?
|
541
|
+
data[:Subject] = prod_data[:Subject]
|
542
|
+
data[:Verb] = prod_data[:Verb].to_a.last
|
543
|
+
end
|
544
|
+
production(:ObjectList) do |input, data, callback|
|
545
|
+
add_prod_datum(:pattern, data[:pattern])
|
546
|
+
end
|
547
|
+
|
548
|
+
# [75] Object ::= GraphNode
|
549
|
+
production(:Object) do |input, data, callback|
|
550
|
+
object = data[:VarOrTerm] || data[:TriplesNode] || data[:GraphNode]
|
551
|
+
if object
|
552
|
+
add_pattern(:Object, :subject => prod_data[:Subject], :predicate => prod_data[:Verb], :object => object)
|
553
|
+
add_prod_datum(:pattern, data[:pattern])
|
554
|
+
end
|
555
|
+
end
|
556
|
+
|
557
|
+
# [76] Verb ::= VarOrIRIref | 'a'
|
558
|
+
production(:Verb) do |input, data, callback|
|
559
|
+
data.values.each {|v| add_prod_datum(:Verb, v)}
|
560
|
+
end
|
561
|
+
|
562
|
+
# [78] PropertyListNotEmptyPath ::= ( VerbPath | VerbSimple ) ObjectList ( ';' ( ( VerbPath | VerbSimple ) ObjectList )? )*
|
563
|
+
start_production(:PropertyListNotEmptyPath) do |input, data, callback|
|
564
|
+
subject = input[:VarOrTerm]
|
565
|
+
error(nil, "Expected VarOrTerm", :production => ::PropertyListNotEmptyPath) if validate? && !subject
|
566
|
+
data[:Subject] = subject
|
567
|
+
end
|
568
|
+
production(:PropertyListNotEmptyPath) do |input, data, callback|
|
569
|
+
add_prod_datum(:pattern, data[:pattern])
|
570
|
+
end
|
571
|
+
|
572
|
+
# [80] VerbPath ::= Path
|
573
|
+
production(:VerbPath) do |input, data, callback|
|
574
|
+
data.values.each {|v| add_prod_datum(:Verb, v)}
|
575
|
+
end
|
576
|
+
|
577
|
+
# [81] VerbSimple ::= Var
|
578
|
+
production(:VerbSimple) do |input, data, callback|
|
579
|
+
data.values.each {|v| add_prod_datum(:Verb, v)}
|
580
|
+
end
|
581
|
+
|
582
|
+
# [92] TriplesNode ::= Collection | BlankNodePropertyList
|
583
|
+
start_production(:TriplesNode) do |input, data, callback|
|
584
|
+
# Called after Verb. The prod_data stack should have Subject and Verb elements
|
585
|
+
data[:TriplesNode] = bnode
|
586
|
+
end
|
587
|
+
production(:TriplesNode) do |input, data, callback|
|
588
|
+
add_prod_datum(:pattern, data[:pattern])
|
589
|
+
add_prod_datum(:TriplesNode, data[:TriplesNode])
|
590
|
+
end
|
591
|
+
|
592
|
+
# [94] Collection ::= '(' GraphNode+ ')'
|
593
|
+
start_production(:Collection) do |input, data, callback|
|
594
|
+
# Tells the TriplesNode production to collect and not generate statements
|
595
|
+
data[:Collection] = prod_data[:TriplesNode]
|
596
|
+
end
|
597
|
+
production(:Collection) do |input, data, callback|
|
598
|
+
expand_collection(data)
|
599
|
+
end
|
600
|
+
|
601
|
+
# [95] GraphNode ::= VarOrTerm | TriplesNode
|
602
|
+
production(:GraphNode) do |input, data, callback|
|
603
|
+
term = data[:VarOrTerm] || data[:TriplesNode]
|
604
|
+
add_prod_datum(:pattern, data[:pattern])
|
605
|
+
add_prod_datum(:GraphNode, term)
|
606
|
+
end
|
607
|
+
|
608
|
+
# [96] VarOrTerm ::= Var | GraphTerm
|
609
|
+
production(:VarOrTerm) do |input, data, callback|
|
610
|
+
data.values.each {|v| add_prod_datum(:VarOrTerm, v)}
|
611
|
+
end
|
612
|
+
|
613
|
+
# [97] VarOrIRIref ::= Var | IRIref
|
614
|
+
production(:VarOrIRIref) do |input, data, callback|
|
615
|
+
data.values.each {|v| add_prod_datum(:VarOrIRIref, v)}
|
616
|
+
end
|
617
|
+
|
618
|
+
# [99] GraphTerm ::= IRIref | RDFLiteral | NumericLiteral
|
619
|
+
# | BooleanLiteral | BlankNode | NIL
|
620
|
+
production(:GraphTerm) do |input, data, callback|
|
621
|
+
add_prod_datum(:GraphTerm,
|
622
|
+
data[:IRIref] ||
|
623
|
+
data[:literal] ||
|
624
|
+
data[:BlankNode] ||
|
625
|
+
data[:NIL])
|
626
|
+
end
|
627
|
+
|
628
|
+
# [100] Expression ::= ConditionalOrExpression
|
629
|
+
production(:Expression) do |input, data, callback|
|
630
|
+
add_prod_datum(:Expression, data[:Expression])
|
631
|
+
end
|
632
|
+
|
633
|
+
# [101] ConditionalOrExpression ::= ConditionalAndExpression
|
634
|
+
# ( '||' ConditionalAndExpression )*
|
635
|
+
production(:ConditionalOrExpression) do |input, data, callback|
|
636
|
+
add_operator_expressions(:_OR, data)
|
637
|
+
end
|
638
|
+
|
639
|
+
# ( '||' ConditionalAndExpression )*
|
640
|
+
production(:_ConditionalOrExpression_1) do |input, data, callback|
|
641
|
+
accumulate_operator_expressions(:ConditionalOrExpression, :_OR, data)
|
642
|
+
end
|
643
|
+
|
644
|
+
# [102] ConditionalAndExpression ::= ValueLogical ( '&&' ValueLogical )*
|
645
|
+
production(:ConditionalAndExpression) do |input, data, callback|
|
646
|
+
add_operator_expressions(:_AND, data)
|
647
|
+
end
|
648
|
+
|
649
|
+
# ( '||' ConditionalAndExpression )*
|
650
|
+
production(:_ConditionalAndExpression_1) do |input, data, callback|
|
651
|
+
accumulate_operator_expressions(:ConditionalAndExpression, :_AND, data)
|
652
|
+
end
|
653
|
+
|
654
|
+
# [104] RelationalExpression ::= NumericExpression
|
655
|
+
# ( '=' NumericExpression
|
656
|
+
# | '!=' NumericExpression
|
657
|
+
# | '<' NumericExpression
|
658
|
+
# | '>' NumericExpression
|
659
|
+
# | '<=' NumericExpression
|
660
|
+
# | '>=' NumericExpression
|
661
|
+
# | 'IN' ExpressionList
|
662
|
+
# | 'NOT' 'IN' ExpressionList
|
663
|
+
# )?
|
664
|
+
production(:RelationalExpression) do |input, data, callback|
|
665
|
+
if data[:_Compare_Numeric]
|
666
|
+
add_prod_datum(:Expression, SPARQL::Algebra::Expression.for(data[:_Compare_Numeric].insert(1, *data[:Expression])))
|
667
|
+
else
|
668
|
+
# NumericExpression with no comparitor
|
669
|
+
add_prod_datum(:Expression, data[:Expression])
|
670
|
+
end
|
671
|
+
end
|
672
|
+
|
673
|
+
# ( '=' NumericExpression | '!=' NumericExpression | ... )?
|
674
|
+
production(:_RelationalExpression_1) do |input, data, callback|
|
675
|
+
if data[:RelationalExpression]
|
676
|
+
add_prod_datum(:_Compare_Numeric, data[:RelationalExpression] + data[:Expression])
|
677
|
+
end
|
678
|
+
end
|
679
|
+
|
680
|
+
# [106] AdditiveExpression ::= MultiplicativeExpression
|
681
|
+
# ( '+' MultiplicativeExpression
|
682
|
+
# | '-' MultiplicativeExpression
|
683
|
+
# | ( NumericLiteralPositive | NumericLiteralNegative )
|
684
|
+
# ( ( '*' UnaryExpression )
|
685
|
+
# | ( '/' UnaryExpression ) )?
|
686
|
+
# )*
|
687
|
+
production(:AdditiveExpression) do |input, data, callback|
|
688
|
+
add_operator_expressions(:_Add_Sub, data)
|
689
|
+
end
|
690
|
+
|
691
|
+
# ( '+' MultiplicativeExpression
|
692
|
+
# | '-' MultiplicativeExpression
|
693
|
+
# | ( NumericLiteralPositive | NumericLiteralNegative )
|
694
|
+
# ( ( '*' UnaryExpression )
|
695
|
+
# | ( '/' UnaryExpression ) )?
|
696
|
+
# )*
|
697
|
+
production(:_AdditiveExpression_1) do |input, data, callback|
|
698
|
+
accumulate_operator_expressions(:AdditiveExpression, :_Add_Sub, data)
|
699
|
+
end
|
700
|
+
|
701
|
+
# | ( NumericLiteralPositive | NumericLiteralNegative )
|
702
|
+
production(:_AdditiveExpression_7) do |input, data, callback|
|
703
|
+
val = data[:literal].first.to_s
|
704
|
+
op, val = val[0,1], val[1..-1]
|
705
|
+
add_prod_datum(:AdditiveExpression, op)
|
706
|
+
add_prod_datum(:Expression, data[:literal])
|
707
|
+
end
|
708
|
+
|
709
|
+
# [107] MultiplicativeExpression ::= UnaryExpression
|
710
|
+
# ( '*' UnaryExpression
|
711
|
+
# | '/' UnaryExpression )*
|
712
|
+
production(:MultiplicativeExpression) do |input, data, callback|
  # Fold the accumulated (op operand) pairs into left-associated
  # binary (* / /) algebra expressions.
  add_operator_expressions(:_Mul_Div, data)
end
|
715
|
+
|
716
|
+
# ( '*' UnaryExpression
|
717
|
+
# | '/' UnaryExpression )*
|
718
|
+
production(:_MultiplicativeExpression_1) do |input, data, callback|
  # Push this repetition's operator/operand pair onto the :_Mul_Div
  # accumulator consumed by the parent MultiplicativeExpression.
  accumulate_operator_expressions(:MultiplicativeExpression, :_Mul_Div, data)
end
|
721
|
+
|
722
|
+
# [108] UnaryExpression ::= '!' PrimaryExpression
|
723
|
+
# | '+' PrimaryExpression
|
724
|
+
# | '-' PrimaryExpression
|
725
|
+
# | PrimaryExpression
|
726
|
+
production(:UnaryExpression) do |input, data, callback|
  # Dispatch on the recorded unary operator. Unary '+' (and the
  # operator-less case) falls through to the else branch: it is the
  # identity and the inner expression is forwarded unchanged.
  case data[:UnaryExpression]
  when ["!"]
    add_prod_datum(:Expression, SPARQL::Algebra::Expression[:not, data[:Expression].first])
  when ["-"]
    e = data[:Expression].first
    # Constant-fold negation of plain numeric literals instead of
    # emitting a (minus ...) node.
    if e.is_a?(RDF::Literal::Numeric)
      add_prod_datum(:Expression, -e) # Simple optimization to match ARQ generation
    else
      add_prod_datum(:Expression, SPARQL::Algebra::Expression[:minus, e])
    end
  else
    add_prod_datum(:Expression, data[:Expression])
  end
end
|
741
|
+
|
742
|
+
# [109] PrimaryExpression ::= BrackettedExpression | BuiltInCall
|
743
|
+
# | IRIrefOrFunction | RDFLiteral
|
744
|
+
# | NumericLiteral | BooleanLiteral
|
745
|
+
# | Var | Aggregate
|
746
|
+
production(:PrimaryExpression) do |input, data, callback|
  # Exactly one alternative matched; promote whichever datum is present
  # to :Expression for the enclosing production.
  if data[:Expression]
    add_prod_datum(:Expression, data[:Expression])
  elsif data[:BuiltInCall]
    add_prod_datum(:Expression, data[:BuiltInCall])
  elsif data[:IRIref]
    add_prod_datum(:Expression, data[:IRIref])
  elsif data[:Function]
    add_prod_datum(:Expression, data[:Function]) # Maintain array representation
  elsif data[:literal]
    add_prod_datum(:Expression, data[:literal])
  elsif data[:Var]
    add_prod_datum(:Expression, data[:Var])
  end

  # Keep track of this for parent UnaryExpression production
  add_prod_datum(:UnaryExpression, data[:UnaryExpression])
end
|
764
|
+
|
765
|
+
# [111] BuiltInCall ::= 'STR' '(' Expression ')'
|
766
|
+
# | 'LANG' '(' Expression ')'
|
767
|
+
# | 'LANGMATCHES' '(' Expression ',' Expression ')'
|
768
|
+
# | 'DATATYPE' '(' Expression ')'
|
769
|
+
# | 'BOUND' '(' Var ')'
|
770
|
+
# | 'IRI' '(' Expression ')'
|
771
|
+
# | 'URI' '(' Expression ')'
|
772
|
+
# | 'BNODE' ( '(' Expression ')' | NIL )
|
773
|
+
# | 'RAND' NIL
|
774
|
+
# | 'ABS' '(' Expression ')'
|
775
|
+
# | 'CEIL' '(' Expression ')'
|
776
|
+
# | 'FLOOR' '(' Expression ')'
|
777
|
+
# | 'ROUND' '(' Expression ')'
|
778
|
+
# | 'CONCAT' ExpressionList
|
779
|
+
# | SubstringExpression
|
780
|
+
# | 'STRLEN' '(' Expression ')'
|
781
|
+
# | 'UCASE' '(' Expression ')'
|
782
|
+
# | 'LCASE' '(' Expression ')'
|
783
|
+
# | 'ENCODE_FOR_URI' '(' Expression ')'
|
784
|
+
# | 'CONTAINS' '(' Expression ',' Expression ')'
|
785
|
+
# | 'STRSTARTS' '(' Expression ',' Expression ')'
|
786
|
+
# | 'STRENDS' '(' Expression ',' Expression ')'
|
787
|
+
# | 'YEAR' '(' Expression ')'
|
788
|
+
# | 'MONTH' '(' Expression ')'
|
789
|
+
# | 'DAY' '(' Expression ')'
|
790
|
+
# | 'HOURS' '(' Expression ')'
|
791
|
+
# | 'MINUTES' '(' Expression ')'
|
792
|
+
# | 'SECONDS' '(' Expression ')'
|
793
|
+
# | 'TIMEZONE' '(' Expression ')'
|
794
|
+
# | 'TZ' '(' Expression ')'
|
795
|
+
# | 'NOW' NIL
|
796
|
+
# | 'MD5' '(' Expression ')'
|
797
|
+
# | 'SHA1' '(' Expression ')'
|
798
|
+
# | 'SHA224' '(' Expression ')'
|
799
|
+
# | 'SHA256' '(' Expression ')'
|
800
|
+
# | 'SHA384' '(' Expression ')'
|
801
|
+
# | 'SHA512' '(' Expression ')'
|
802
|
+
# | 'COALESCE' ExpressionList
|
803
|
+
# | 'IF' '(' Expression ',' Expression ',' Expression ')'
|
804
|
+
# | 'STRLANG' '(' Expression ',' Expression ')'
|
805
|
+
# | 'STRDT' '(' Expression ',' Expression ')'
|
806
|
+
# | 'sameTerm' '(' Expression ',' Expression ')'
|
807
|
+
# | 'isIRI' '(' Expression ')'
|
808
|
+
# | 'isURI' '(' Expression ')'
|
809
|
+
# | 'isBLANK' '(' Expression ')'
|
810
|
+
# | 'isLITERAL' '(' Expression ')'
|
811
|
+
# | 'isNUMERIC' '(' Expression ')'
|
812
|
+
# | RegexExpression
|
813
|
+
# | ExistsFunc
|
814
|
+
# | NotExistsFunc
|
815
|
+
production(:BuiltInCall) do |input, data, callback|
  if builtin = data.keys.detect {|k| BUILTINS.include?(k)}
    # Simple builtin: arguments arrived under :Expression; build (builtin arg ...)
    add_prod_datum(:BuiltInCall, SPARQL::Algebra::Expression.for(data[:Expression].unshift(builtin)))
  elsif builtin_rule = data.keys.detect {|k| BUILTIN_RULES.include?(k)}
    # Rule-based builtin (regex/substr/exists/not_exists): sub-production
    # stashed its arguments under its own key.
    add_prod_datum(:BuiltInCall, SPARQL::Algebra::Expression.for(data[builtin_rule].unshift(builtin_rule)))
  elsif data[:bound]
    # BOUND takes a variable rather than an expression
    add_prod_datum(:BuiltInCall, SPARQL::Algebra::Expression.for(data[:Var].unshift(:bound)))
  elsif data[:BuiltInCall]
    # Nested call already identified; append its expression arguments
    add_prod_datum(:BuiltInCall, SPARQL::Algebra::Expression.for(data[:BuiltInCall] + data[:Expression]))
  end
end
|
826
|
+
|
827
|
+
# [112] RegexExpression ::= 'REGEX' '(' Expression ',' Expression
|
828
|
+
# ( ',' Expression )? ')'
|
829
|
+
production(:RegexExpression) do |input, data, callback|
  # Stash the 2-3 REGEX arguments under :regex; the parent BuiltInCall
  # production recognizes the key (see BUILTIN_RULES) and builds the operator.
  add_prod_datum(:regex, data[:Expression])
end
|
832
|
+
|
833
|
+
# [113] SubstringExpression ::= 'SUBSTR'
|
834
|
+
# '(' Expression ',' Expression
|
835
|
+
# ( ',' Expression )? ')'
|
836
|
+
production(:SubstringExpression) do |input, data, callback|
  # Stash the 2-3 SUBSTR arguments under :substr for BuiltInCall to assemble.
  add_prod_datum(:substr, data[:Expression])
end
|
839
|
+
|
840
|
+
# [114] ExistsFunc ::= 'EXISTS' GroupGraphPattern
|
841
|
+
production(:ExistsFunc) do |input, data, callback|
  # Stash the group graph pattern's query under :exists for BuiltInCall.
  add_prod_datum(:exists, data[:query])
end
|
844
|
+
|
845
|
+
# [115] NotExistsFunc ::= 'NOT' 'EXISTS' GroupGraphPattern
|
846
|
+
production(:NotExistsFunc) do |input, data, callback|
  # Stash the group graph pattern's query under :not_exists for BuiltInCall.
  add_prod_datum(:not_exists, data[:query])
end
|
849
|
+
|
850
|
+
# [117] IRIrefOrFunction ::= IRIref ArgList?
|
851
|
+
production(:IRIrefOrFunction) do |input, data, callback|
  # With an argument list this is a function call; otherwise it is a bare IRI.
  if data.has_key?(:ArgList)
    # Function is (func arg1 arg2 ...)
    add_prod_data(:Function, data[:IRIref] + data[:ArgList])
  else
    add_prod_datum(:IRIref, data[:IRIref])
  end
end
|
859
|
+
|
860
|
+
# [118] RDFLiteral ::= String ( LANGTAG | ( '^^' IRIref ) )?
|
861
|
+
production(:RDFLiteral) do |input, data, callback|
  # Combine the lexical form with an optional datatype IRI or language tag
  # into a single RDF::Literal.
  if data[:string]
    lit = data.dup
    str = lit.delete(:string).last
    # Remaining keys in +lit+ become RDF::Literal.new options
    lit[:datatype] = lit.delete(:IRIref).last if lit[:IRIref]
    # Language tags are case-insensitive; normalize to lower case
    lit[:language] = lit.delete(:language).last.downcase if lit[:language]
    add_prod_datum(:literal, RDF::Literal.new(str, lit)) if str
  end
end
|
870
|
+
|
871
|
+
# [121] NumericLiteralPositive ::= INTEGER_POSITIVE
|
872
|
+
# | DECIMAL_POSITIVE
|
873
|
+
# | DOUBLE_POSITIVE
|
874
|
+
production(:NumericLiteralPositive) do |input, data, callback|
  # Whichever terminal matched (integer/decimal/double), its value is the literal
  num = data.values.flatten.last
  add_prod_datum(:literal, num)

  # Keep track of this for parent UnaryExpression production
  add_prod_datum(:UnaryExpression, data[:UnaryExpression])
end
|
881
|
+
|
882
|
+
# [122] NumericLiteralNegative ::= INTEGER_NEGATIVE
|
883
|
+
# | DECIMAL_NEGATIVE
|
884
|
+
# | DOUBLE_NEGATIVE
|
885
|
+
production(:NumericLiteralNegative) do |input, data, callback|
  # Whichever terminal matched (integer/decimal/double), its value is the literal
  num = data.values.flatten.last
  add_prod_datum(:literal, num)

  # Keep track of this for parent UnaryExpression production
  add_prod_datum(:UnaryExpression, data[:UnaryExpression])
end
|
892
|
+
|
893
|
+
# [125] IRIref ::= IRI_REF | PrefixedName
|
894
|
+
production(:IRIref) do |input, data, callback|
  # Both alternatives (IRI_REF terminal and PrefixedName) produce :iri
  add_prod_datum(:IRIref, data[:iri])
end
|
897
|
+
|
898
|
+
# [126] PrefixedName ::= PNAME_LN | PNAME_NS
|
899
|
+
production(:PrefixedName) do |input, data, callback|
  # Re-key the expanded prefixed name as :iri for the parent IRIref production
  add_prod_datum(:iri, data[:PrefixedName])
end
|
902
|
+
|
903
|
+
##
# Initializes a new parser instance.
#
# @param [String, #to_s] input
# @param [Hash{Symbol => Object}] options
# @option options [Hash] :prefixes (Hash.new)
#   the prefix mappings to use (for accessing intermediate parser productions)
# @option options [#to_s] :base_uri (nil)
#   the base URI to use when resolving relative URIs (for accessing intermediate parser productions)
# @option options [#to_s] :anon_base ("b0")
#   Basis for generating anonymous Nodes
# @option options [Boolean] :resolve_iris (false)
#   Resolve prefix and relative IRIs, otherwise, when serializing the parsed SSE
#   as S-Expressions, use the original prefixed and relative URIs along with `base` and `prefix`
#   definitions.
# @option options [Boolean] :validate (false)
#   whether to validate the parsed statements and values
# @option options [Boolean] :progress
#   Show progress of parser productions
# @option options [Boolean] :debug
#   Detailed debug output
# @yield  [parser] optional configuration block; evaluated with
#   `instance_eval` when it accepts no arguments, otherwise called with `self`
# @return [SPARQL::Grammar::Parser]
def initialize(input = nil, options = {}, &block)
  @input = input.to_s.dup
  @input.force_encoding(Encoding::UTF_8) if @input.respond_to?(:force_encoding)
  # Caller-supplied options override these defaults
  @options = {:anon_base => "b0", :validate => false}.merge(options)

  debug("base IRI") {base_uri.inspect}
  debug("validate") {validate?.inspect}

  @vars = {}
  @nd_var_gen = "0" # counter for non-distinguished variable identifiers

  if block_given?
    # FIX: the block must be captured via &block in the signature; previously
    # `block` was an undefined local variable and this branch raised NameError
    # whenever a configuration block was supplied.
    case block.arity
    when 0 then instance_eval(&block)
    else block.call(self)
    end
  end
end
|
943
|
+
|
944
|
+
##
# Returns a truthy value if the input string is syntactically valid.
#
# NOTE(review): on success this returns the result of #parse (which may be
# nil for an empty document) rather than a literal `true`; only a parse
# failure yields `false` — confirm callers rely solely on truthiness.
#
# @return [Boolean]
def valid?
  begin
    parse
  rescue Error
    false
  end
end
|
953
|
+
|
954
|
+
# The raw algebra produced by the last #parse, suitable for SXP serialization.
# @return [String]
def to_sxp_bin
  @result
end
|
958
|
+
|
959
|
+
# Serialize the last parse result as an S-Expression string.
def to_s
  @result.to_sxp
end
|
962
|
+
|
963
|
+
# Preserve the LL(1) parser's #parse as #ll1_parse before it is redefined below.
alias_method :ll1_parse, :parse
|
964
|
+
|
965
|
+
# Parse query
|
966
|
+
#
|
967
|
+
# The result is a SPARQL Algebra S-List. Productions return an array such as the following:
|
968
|
+
#
|
969
|
+
# (prefix ((: <http://example/>))
|
970
|
+
# (union
|
971
|
+
# (bgp (triple ?s ?p ?o))
|
972
|
+
# (graph ?g
|
973
|
+
# (bgp (triple ?s ?p ?o)))))
|
974
|
+
#
|
975
|
+
# @param [Symbol, #to_s] prod The starting production for the parser.
|
976
|
+
# It may be a URI from the grammar, or a symbol representing the local_name portion of the grammar URI.
|
977
|
+
# @return [Array]
|
978
|
+
# @see http://www.w3.org/2001/sw/DataAccess/rq23/rq24-algebra.html
|
979
|
+
# @see http://axel.deri.ie/sparqltutorial/ESWC2007_SPARQL_Tutorial_unit2b.pdf
|
980
|
+
def parse(prod = START)
  # Run the underlying LL(1) parser with this grammar's branch/first/follow
  # tables; productions populate @prod_data as a side effect.
  ll1_parse(@input, prod.to_sym, @options.merge(:branch => BRANCH,
                                                :first => FIRST,
                                                :follow => FOLLOW)
  ) {}

  # The last thing on the @prod_data stack is the result
  @result = case
  when !prod_data.is_a?(Hash)
    prod_data
  when prod_data.empty?
    nil
  when prod_data[:query]
    # Unwrap a single query; keep an array of multiple results
    prod_data[:query].to_a.length == 1 ? prod_data[:query].first : prod_data[:query]
  else
    # Partial parse (non-START production): expose it as [key, *data]
    key = prod_data.keys.first
    [key] + prod_data[key] # Creates [:key, [:triple], ...]
  end
end
|
999
|
+
|
1000
|
+
private
|
1001
|
+
##
|
1002
|
+
# Returns the URI prefixes currently defined for this parser.
|
1003
|
+
#
|
1004
|
+
# @example
|
1005
|
+
# prefixes[:dc] #=> RDF::URI('http://purl.org/dc/terms/')
|
1006
|
+
#
|
1007
|
+
# @return [Hash{Symbol => RDF::URI}]
|
1008
|
+
# @since 0.3.0
|
1009
|
+
# Lazily initialize and return the prefix-mapping hash stored in @options.
def prefixes
  @options[:prefixes] = {} unless @options[:prefixes]
  @options[:prefixes]
end
|
1012
|
+
|
1013
|
+
##
|
1014
|
+
# Defines the given URI prefixes for this parser.
|
1015
|
+
#
|
1016
|
+
# @example
|
1017
|
+
# prefixes = {
|
1018
|
+
# :dc => RDF::URI('http://purl.org/dc/terms/'),
|
1019
|
+
# }
|
1020
|
+
#
|
1021
|
+
# @param [Hash{Symbol => RDF::URI}] prefixes
|
1022
|
+
# @return [Hash{Symbol => RDF::URI}]
|
1023
|
+
# @since 0.3.0
|
1024
|
+
# Replace the parser's prefix mappings wholesale.
def prefixes=(mappings)
  @options[:prefixes] = mappings
end
|
1027
|
+
|
1028
|
+
##
|
1029
|
+
# Defines the given named URI prefix for this parser.
|
1030
|
+
#
|
1031
|
+
# @example Defining a URI prefix
|
1032
|
+
# prefix :dc, RDF::URI('http://purl.org/dc/terms/')
|
1033
|
+
#
|
1034
|
+
# @example Returning a URI prefix
|
1035
|
+
# prefix(:dc) #=> RDF::URI('http://purl.org/dc/terms/')
|
1036
|
+
#
|
1037
|
+
# @overload prefix(name, uri)
|
1038
|
+
# @param [Symbol, #to_s] name
|
1039
|
+
# @param [RDF::URI, #to_s] uri
|
1040
|
+
#
|
1041
|
+
# @overload prefix(name)
|
1042
|
+
# @param [Symbol, #to_s] name
|
1043
|
+
#
|
1044
|
+
# @return [RDF::URI]
|
1045
|
+
# Read (one argument) or define (two arguments) a single prefix mapping.
def prefix(name, iri = nil)
  name = if name.to_s.empty?
    nil
  elsif name.respond_to?(:to_sym)
    name.to_sym
  else
    name.to_s.to_sym
  end
  if iri.nil?
    prefixes[name]
  else
    prefixes[name] = iri
  end
end
|
1049
|
+
|
1050
|
+
##
# Returns the Base URI defined for the parser,
# as specified or when parsing a BASE prologue element.
#
# @example
#   base  #=> RDF::URI('http://example.com/')
#
# @return [RDF::URI]
def base_uri
  RDF::URI(@options[:base_uri])
end
|
1061
|
+
|
1062
|
+
##
|
1063
|
+
# Set the Base URI to use for this parser.
|
1064
|
+
#
|
1065
|
+
# @param [RDF::URI, #to_s] iri
|
1066
|
+
#
|
1067
|
+
# @example
|
1068
|
+
# base_uri = RDF::URI('http://purl.org/dc/terms/')
|
1069
|
+
#
|
1070
|
+
# @return [RDF::URI]
|
1071
|
+
# Coerce and store the base URI used when resolving relative IRIs.
def base_uri=(iri)
  @options[:base_uri] = RDF::URI(iri)
end
|
1074
|
+
|
1075
|
+
##
# Returns `true` when resolving IRIs, otherwise BASE and PREFIX are retained in the output algebra.
#
# (Doc fix: this comment was previously swapped with the one on #validate?.)
#
# @return [Boolean] `true` or `false`
# @since 1.0.3
def resolve_iris?
  @options[:resolve_iris]
end
|
1083
|
+
|
1084
|
+
##
# Returns `true` if parsed statements and values should be validated.
#
# (Doc fix: this comment was previously swapped with the one on #resolve_iris?.)
#
# @return [Boolean] `true` or `false`
# @since 0.3.0
def validate?
  @options[:validate]
end
|
1092
|
+
|
1093
|
+
# Counter used to allocate non-distinguished variable identifiers,
# which stand in for BNode labels within patterns (see #bnode / #variable).
attr_accessor :nd_var_gen
|
1095
|
+
|
1096
|
+
# Generate a BNode identifier.
#
# While @nd_var_gen is set (i.e. inside patterns), blank nodes are
# represented as non-distinguished variables; otherwise a genuine
# RDF::Node is created, advancing :anon_base when no id is supplied.
def bnode(id = nil)
  # Use non-distinguished variables within patterns
  return variable(id, false) if @nd_var_gen

  unless id
    id = @options[:anon_base]
    @options[:anon_base] = @options[:anon_base].succ
  end
  RDF::Node.new(id)
end
|
1109
|
+
|
1110
|
+
##
# Return variable allocated to an ID.
# If no ID is provided, a new variable
# is allocated. Otherwise, any previous assignment will be used.
#
# The variable has a #distinguished? method applied depending on if this
# is a distinguished or non-distinguished variable. Non-distinguished
# variables are effectively the same as BNodes.
# @return [RDF::Query::Variable]
def variable(id, distinguished = true)
  id = nil if id.to_s.empty?

  if id
    # Memoize per identifier; a cached variable keeps its original
    # distinguished flag.
    return @vars[id] ||= RDF::Query::Variable.new(id).tap { |v| v.distinguished = distinguished }
  end

  unless distinguished
    # Allocate the next non-distinguished variable identifier
    id = @nd_var_gen
    @nd_var_gen = id.succ
  end
  RDF::Query::Variable.new(id).tap { |v| v.distinguished = distinguished }
end
|
1139
|
+
|
1140
|
+
# Create URIs
#
# Resolves +value+ against the base URI when one is set; unless
# #resolve_iris? is true, the original lexical form is retained (via the
# URI's `lexical=` attribute) for round-tripping in serialized SSE.
def iri(value)
  # If we have a base URI, use that when constructing a new URI
  iri = if base_uri
    u = base_uri.join(value.to_s)
    # Preserve the original "<...>" spelling unless resolution is a no-op
    # or IRIs are being fully resolved.
    u.lexical = "<#{value}>" unless u.to_s == value.to_s || resolve_iris?
    u
  else
    RDF::URI(value)
  end

  #iri.validate! if validate? && iri.respond_to?(:validate)
  #iri = RDF::URI.intern(iri) if intern?
  iri
end
|
1155
|
+
|
1156
|
+
# Expand a prefixed name (prefix:suffix) into an IRI, retaining the
# prefixed lexical form unless #resolve_iris? is true.
def ns(prefix, suffix)
  base = prefix(prefix).to_s
  # Avoid a doubled '#' when the namespace already ends in a fragment
  suffix = suffix.to_s.sub(/^\#/, "") if base.index("#")
  debug {"ns(#{prefix.inspect}): base: '#{base}', suffix: '#{suffix}'"}
  iri = iri(base + suffix.to_s)
  # Cause URI to be serialized as a lexical
  iri.lexical = "#{prefix}:#{suffix}" unless resolve_iris?
  iri
end
|
1165
|
+
|
1166
|
+
# Create a literal
#
# @param [Object] value lexical value
# @param [Hash] options passed through to RDF::Literal.new (e.g. :datatype, :language)
def literal(value, options = {})
  options = options.dup
  # Internal representation is to not use xsd:string, although it could arguably go the other way.
  options.delete(:datatype) if options[:datatype] == RDF::XSD.string
  debug("literal") do
    "value: #{value.inspect}, " +
    "options: #{options.inspect}, " +
    "validate: #{validate?.inspect}, "
  end
  RDF::Literal.new(value, options.merge(:validate => validate?))
end
|
1178
|
+
|
1179
|
+
# Take collection of objects and create RDF Collection using rdf:first, rdf:rest and rdf:nil
# @param [Hash] data Production Data
def expand_collection(data)
  # Add any triples generated from deeper productions
  add_prod_datum(:pattern, data[:pattern])

  # Create list items for each element in data[:GraphNode]
  # NOTE(review): `col` is assigned but never used afterwards — candidate
  # for removal.
  first = col = data[:Collection]
  list = data[:GraphNode].to_a.flatten.compact
  # The final element is handled separately so the rdf:rest chain can be
  # terminated with rdf:nil.
  last = list.pop

  list.each do |r|
    add_pattern(:Collection, :subject => first, :predicate => RDF["first"], :object => r)
    rest = bnode()
    add_pattern(:Collection, :subject => first, :predicate => RDF["rest"], :object => rest)
    first = rest
  end

  if last
    add_pattern(:Collection, :subject => first, :predicate => RDF["first"], :object => last)
  end
  # Terminate the list (an empty collection yields just this triple)
  add_pattern(:Collection, :subject => first, :predicate => RDF["rest"], :object => RDF["nil"])
end
|
1202
|
+
|
1203
|
+
# add a pattern
#
# @param [String] production Production generating pattern
# @param [Hash{Symbol => Object}] options :subject/:predicate/:object terms
def add_pattern(production, options)
  progress(production, "add_pattern: #{options.inspect}")
  progress(production, "[:pattern, #{options[:subject]}, #{options[:predicate]}, #{options[:object]}]")
  triple = {}
  options.each_pair do |r, v|
    # Unwrap single-element arrays produced by accumulating productions
    if v.is_a?(Array) && v.flatten.length == 1
      v = v.flatten.first
    end
    if validate? && !v.is_a?(RDF::Term)
      error("add_pattern", "Expected #{r} to be a resource, but it was #{v.inspect}",
        :production => production)
    end
    triple[r] = v
  end
  add_prod_datum(:pattern, RDF::Query::Pattern.new(triple))
end
|
1223
|
+
|
1224
|
+
# Flatten filter data of the form [op+ bgp?] into its expression and
# (optional) query parts, wrapping multiple operators in an exprlist.
# @return [Array(Object, Object)] [expr, query]
def flatten_filter(data)
  # Anything executable trailing the operators is the query to filter
  query = data.pop if data.last.respond_to?(:execute)
  expr = if data.length > 1
    SPARQL::Algebra::Operator::Exprlist.new(*data)
  else
    data.first
  end
  [expr, query]
end
|
1231
|
+
|
1232
|
+
# Merge query modifiers, datasets, and projections
#
# Wraps the base query in modifier operators from innermost to outermost:
# order → project → distinct/reduced → slice → dataset.
def merge_modifiers(data)
  # Default to an empty BGP when no query was produced
  query = data[:query] ? data[:query].first : SPARQL::Algebra::Operator::BGP.new

  # Add datasets and modifiers in order
  query = SPARQL::Algebra::Expression[:order, data[:order].first, query] if data[:order]

  query = SPARQL::Algebra::Expression[:project, data[:Var], query] if data[:Var]

  query = SPARQL::Algebra::Expression[data[:DISTINCT_REDUCED].first, query] if data[:DISTINCT_REDUCED]

  query = SPARQL::Algebra::Expression[:slice, data[:slice][0], data[:slice][1], query] if data[:slice]

  query = SPARQL::Algebra::Expression[:dataset, data[:dataset], query] if data[:dataset]

  query
end
|
1249
|
+
|
1250
|
+
# Add joined expressions in for prod1 (op prod2)* to form (op (op 1 2) 3)
#
# Consumes [op, operand] pairs queued under +production+, folding them
# left-associatively around the base :Expression.
def add_operator_expressions(production, data)
  # Iterate through expression to create binary operations
  res = data[:Expression]
  while data[production] && !data[production].empty?
    # shift yields the operator, then the next operand
    res = SPARQL::Algebra::Expression[data[production].shift + res + data[production].shift]
  end
  add_prod_datum(:Expression, res)
end
|
1259
|
+
|
1260
|
+
# Accumulate joined expressions in for prod1 (op prod2)* to form (op (op 1 2) 3)
#
# Each repetition pushes its [op, operand] pair (newest first) under
# +production+ for the parent to fold via #add_operator_expressions.
def accumulate_operator_expressions(operator, production, data)
  if data[operator]
    # Add [op data] to stack based on "production"
    add_prod_datum(production, [data[operator], data[:Expression]])
    # Add previous [op data] information
    add_prod_datum(production, data[production])
  else
    # No operator, forward :Expression
    add_prod_datum(:Expression, data[:Expression])
  end
end
|
1272
|
+
|
1273
|
+
##
# Progress output when debugging
# @overload debug(node, message)
#   @param [String] node relative location in input
#   @param [String] message ("")
#
# @overload debug(message)
#   @param [String] message ("")
#
# @yieldreturn [String] added to message
def debug(*args)
  # NOTE(review): gating on RDF::Turtle.debug? looks inherited from
  # rdf-turtle — confirm this is the intended global debug switch here.
  return unless @options[:debug] || RDF::Turtle.debug?
  options = args.last.is_a?(Hash) ? args.pop : {}
  debug_level = options.fetch(:level, 1)
  return unless debug_level <= DEBUG_LEVEL
  depth = options[:depth] || self.depth
  message = args.pop
  message = message.call if message.is_a?(Proc)
  args << message if message
  args << yield if block_given?
  message = "#{args.join(': ')}"
  str = "[#{@lineno}]#{' ' * depth}#{message}"
  case @options[:debug]
  when Array
    # FIX: append to the instance-level accumulator that the case just
    # matched; previously this appended to the *local* `options` hash
    # (usually nil under :debug), raising NoMethodError.
    @options[:debug] << str
  else
    $stderr.puts str
  end
end
|
1302
|
+
|
1303
|
+
end # class Parser
|
1304
|
+
end # module SPARQL::Grammar
|