sparql 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/AUTHORS +3 -0
- data/CREDITS +0 -0
- data/README.markdown +103 -53
- data/UNLICENSE +24 -0
- data/VERSION +1 -0
- data/bin/sparql +87 -0
- data/lib/sparql.rb +105 -22
- data/lib/sparql/algebra.rb +369 -0
- data/lib/sparql/algebra/evaluatable.rb +37 -0
- data/lib/sparql/algebra/expression.rb +284 -0
- data/lib/sparql/algebra/extensions.rb +159 -0
- data/lib/sparql/algebra/operator.rb +492 -0
- data/lib/sparql/algebra/operator/add.rb +34 -0
- data/lib/sparql/algebra/operator/and.rb +65 -0
- data/lib/sparql/algebra/operator/asc.rb +29 -0
- data/lib/sparql/algebra/operator/ask.rb +46 -0
- data/lib/sparql/algebra/operator/base.rb +46 -0
- data/lib/sparql/algebra/operator/bgp.rb +26 -0
- data/lib/sparql/algebra/operator/bound.rb +48 -0
- data/lib/sparql/algebra/operator/compare.rb +84 -0
- data/lib/sparql/algebra/operator/construct.rb +85 -0
- data/lib/sparql/algebra/operator/dataset.rb +77 -0
- data/lib/sparql/algebra/operator/datatype.rb +42 -0
- data/lib/sparql/algebra/operator/desc.rb +17 -0
- data/lib/sparql/algebra/operator/describe.rb +71 -0
- data/lib/sparql/algebra/operator/distinct.rb +50 -0
- data/lib/sparql/algebra/operator/divide.rb +43 -0
- data/lib/sparql/algebra/operator/equal.rb +32 -0
- data/lib/sparql/algebra/operator/exprlist.rb +52 -0
- data/lib/sparql/algebra/operator/filter.rb +71 -0
- data/lib/sparql/algebra/operator/graph.rb +28 -0
- data/lib/sparql/algebra/operator/greater_than.rb +32 -0
- data/lib/sparql/algebra/operator/greater_than_or_equal.rb +33 -0
- data/lib/sparql/algebra/operator/is_blank.rb +35 -0
- data/lib/sparql/algebra/operator/is_iri.rb +37 -0
- data/lib/sparql/algebra/operator/is_literal.rb +36 -0
- data/lib/sparql/algebra/operator/join.rb +67 -0
- data/lib/sparql/algebra/operator/lang.rb +29 -0
- data/lib/sparql/algebra/operator/lang_matches.rb +53 -0
- data/lib/sparql/algebra/operator/left_join.rb +95 -0
- data/lib/sparql/algebra/operator/less_than.rb +32 -0
- data/lib/sparql/algebra/operator/less_than_or_equal.rb +32 -0
- data/lib/sparql/algebra/operator/minus.rb +31 -0
- data/lib/sparql/algebra/operator/multiply.rb +34 -0
- data/lib/sparql/algebra/operator/not.rb +35 -0
- data/lib/sparql/algebra/operator/not_equal.rb +26 -0
- data/lib/sparql/algebra/operator/or.rb +65 -0
- data/lib/sparql/algebra/operator/order.rb +69 -0
- data/lib/sparql/algebra/operator/plus.rb +31 -0
- data/lib/sparql/algebra/operator/prefix.rb +45 -0
- data/lib/sparql/algebra/operator/project.rb +46 -0
- data/lib/sparql/algebra/operator/reduced.rb +47 -0
- data/lib/sparql/algebra/operator/regex.rb +70 -0
- data/lib/sparql/algebra/operator/same_term.rb +46 -0
- data/lib/sparql/algebra/operator/slice.rb +60 -0
- data/lib/sparql/algebra/operator/str.rb +35 -0
- data/lib/sparql/algebra/operator/subtract.rb +32 -0
- data/lib/sparql/algebra/operator/union.rb +55 -0
- data/lib/sparql/algebra/query.rb +99 -0
- data/lib/sparql/algebra/sxp_extensions.rb +35 -0
- data/lib/sparql/algebra/version.rb +20 -0
- data/lib/sparql/extensions.rb +102 -0
- data/lib/sparql/grammar.rb +298 -0
- data/lib/sparql/grammar/lexer.rb +609 -0
- data/lib/sparql/grammar/parser.rb +1383 -0
- data/lib/sparql/grammar/parser/meta.rb +1801 -0
- data/lib/sparql/results.rb +220 -0
- data/lib/sparql/version.rb +20 -0
- metadata +232 -62
- data/Rakefile +0 -22
- data/coverage/index.html +0 -252
- data/coverage/lib-sparql-execute_sparql_rb.html +0 -621
- data/coverage/lib-sparql_rb.html +0 -622
- data/lib/sparql/execute_sparql.rb +0 -27
- data/lib/sparql/sparql.treetop +0 -159
- data/sparql.gemspec +0 -16
- data/spec/spec.opts +0 -2
- data/spec/spec_helper.rb +0 -24
- data/spec/unit/graph_parsing_spec.rb +0 -76
- data/spec/unit/iri_parsing_spec.rb +0 -46
- data/spec/unit/prefixed_names_parsing_spec.rb +0 -40
- data/spec/unit/primitives_parsing_spec.rb +0 -26
- data/spec/unit/sparql_parsing_spec.rb +0 -72
- data/spec/unit/variables_parsing_spec.rb +0 -36
data/lib/sparql/grammar/parser.rb
@@ -0,0 +1,1383 @@
module SPARQL; module Grammar
  ##
  # A parser for the SPARQL 1.0 grammar.
  #
  # @see http://www.w3.org/TR/rdf-sparql-query/#grammar
  # @see http://en.wikipedia.org/wiki/LR_parser
  # @see http://www.w3.org/2000/10/swap/grammar/predictiveParser.py
  # @see http://www.w3.org/2001/sw/DataAccess/rq23/parsers/sparql.ttl
  class Parser
    include SPARQL::Grammar::Meta

    START = SPARQL_GRAMMAR.Query
    RDF_TYPE = (a = RDF.type.dup; a.lexical = 'a'; a).freeze

    ##
    # Initializes a new parser instance.
    #
    # @param [String, #to_s] input
    # @param [Hash{Symbol => Object}] options
    # @option options [Hash] :prefixes (Hash.new)
    #   the prefix mappings to use (for acessing intermediate parser productions)
    # @option options [#to_s] :base_uri (nil)
    #   the base URI to use when resolving relative URIs (for acessing intermediate parser productions)
    # @option options [#to_s] :anon_base ("b0")
    #   Basis for generating anonymous Nodes
    # @option options [Boolean] :resolve_uris (false)
    #   Resolve prefix and relative IRIs, otherwise, when serializing the parsed SSE
    #   as S-Expressions, use the original prefixed and relative URIs along with `base` and `prefix`
    #   definitions.
    # @option options [Boolean] :validate (false)
    #   whether to validate the parsed statements and values
    # @option options [Boolean] :progress
    #   Show progress of parser productions
    # @option options [Boolean] :debug
    #   Detailed debug output
    # @return [SPARQL::Grammar::Parser]
    def initialize(input = nil, options = {})
      @options = {:anon_base => "b0", :validate => false}.merge(options)
      self.input = input if input
      @productions = []
      @vars = {}
      @nd_var_gen = "0"
    end

    ##
    # Any additional options for the parser.
    #
    # @return [Hash]
    attr_reader :options

    ##
    # The current input string being processed.
    #
    # @return [String]
    attr_accessor :input

    ##
    # The current input tokens being processed.
    #
    # @return [Array<Token>]
    attr_reader :tokens

    ##
    # The internal representation of the result using hierarch of RDF objects and SPARQL::Algebra::Operator
    # objects.
    # @return [Array]
    # @see http://sparql.rubyforge.org/algebra
    attr_accessor :result

    ##
    # @param [IO, StringIO, Lexer, Array, String, #to_s] input
    #   Query may be an array of lexed tokens, a lexer, or a
    #   string or open file.
    # @return [void]
    def input=(input)
      case input
      when Array
        @input = nil # FIXME
        @tokens = input
      else
        lexer = input.is_a?(Lexer) ? input : Lexer.new(input, @options)
        @input = lexer.input
        @tokens = lexer.to_a
      end
    end

    ##
    # Returns `true` if the input string is syntactically valid.
    #
    # @return [Boolean]
    def valid?
      parse
    rescue Error
      false
    end

    # @return [String]
    def to_sse
      @result
    end

    def to_s
      @result.to_sxp
    end

    # Parse query
    #
    # The result is a SPARQL Algebra S-List. Productions return an array such as the following:
    #
    #   (prefix ((: <http://example/>))
    #     (union
    #       (bgp (triple ?s ?p ?o))
    #       (graph ?g
    #         (bgp (triple ?s ?p ?o)))))
    #
    # @param [Symbol, #to_s] prod The starting production for the parser.
    #   It may be a URI from the grammar, or a symbol representing the local_name portion of the grammar URI.
    # @return [Array]
    # @see http://www.w3.org/2001/sw/DataAccess/rq23/rq24-algebra.html
    # @see http://axel.deri.ie/sparqltutorial/ESWC2007_SPARQL_Tutorial_unit2b.pdf
    def parse(prod = START)
      @prod_data = [{}]
      prod = prod.to_s.split("#").last.to_sym unless prod.is_a?(Symbol)
      todo_stack = [{:prod => prod, :terms => nil}]

      while !todo_stack.empty?
        pushed = false
        if todo_stack.last[:terms].nil?
          todo_stack.last[:terms] = []
          token = tokens.first
          @lineno = token.lineno if token
          debug("parse(token)") {"#{token.inspect}, prod #{todo_stack.last[:prod]}, depth #{todo_stack.length}"}

          # Got an opened production
          onStart(abbr(todo_stack.last[:prod]))
          break if token.nil?

          cur_prod = todo_stack.last[:prod]
          prod_branch = BRANCHES[cur_prod.to_sym]
          error("parse", "No branches found for '#{abbr(cur_prod)}'",
            :production => cur_prod, :token => token) if prod_branch.nil?
          sequence = prod_branch[token.representation]
          debug("parse(production)") do
            "cur_prod #{cur_prod}, " +
            "token #{token.representation.inspect} " +
            "prod_branch #{prod_branch.keys.inspect}, " +
            "sequence #{sequence.inspect}"
          end
          if sequence.nil?
            expected = prod_branch.values.uniq.map {|u| u.map {|v| abbr(v).inspect}.join(",")}
            error("parse", "Found '#{token.inspect}' when parsing a #{abbr(cur_prod)}. expected #{expected.join(' | ')}",
              :production => cur_prod, :token => token)
          end
          todo_stack.last[:terms] += sequence
        end

        debug("parse(terms)") {"stack #{todo_stack.last.inspect}, depth #{todo_stack.length}"}
        while !todo_stack.last[:terms].to_a.empty?
          term = todo_stack.last[:terms].shift
          debug {"parse tokens(#{term}): #{tokens.inspect}"}
          if tokens.map(&:representation).include?(term)
            token = accept(term)
            @lineno = token.lineno if token
            debug("parse") {"term(#{token.inspect}): #{term}"}
            if token
              onToken(abbr(term), token.value)
            else
              error("parse", "Found '#{word}...'; #{term} expected",
                :production => todo_stack.last[:prod], :token => tokens.first)
            end
          else
            todo_stack << {:prod => term, :terms => nil}
            debug("parse(push)") {"stack #{term}, depth #{todo_stack.length}"}
            pushed = true
            break
          end
        end

        while !pushed && !todo_stack.empty? && todo_stack.last[:terms].to_a.empty?
          debug("parse(pop)") {"stack #{todo_stack.last.inspect}, depth #{todo_stack.length}"}
          todo_stack.pop
          onFinish
        end
      end
      while !todo_stack.empty?
        debug("parse(pop)") {"stack #{todo_stack.last.inspect}, depth #{todo_stack.length}"}
        todo_stack.pop
        onFinish
      end

      # The last thing on the @prod_data stack is the result
      @result = case
        when !prod_data.is_a?(Hash)
          prod_data
        when prod_data.empty?
          nil
        when prod_data[:query]
          prod_data[:query].to_a.length == 1 ? prod_data[:query].first : prod_data[:query]
        else
          key = prod_data.keys.first
          [key] + prod_data[key] # Creates [:key, [:triple], ...]
      end
    end

    ##
    # Returns the URI prefixes currently defined for this parser.
    #
    # @example
    #   parser.prefixes[:dc] #=> RDF::URI('http://purl.org/dc/terms/')
    #
    # @return [Hash{Symbol => RDF::URI}]
    # @since 0.3.0
    def prefixes
      @options[:prefixes] ||= {}
    end

    ##
    # Defines the given URI prefixes for this parser.
    #
    # @example
    #   parser.prefixes = {
    #     :dc => RDF::URI('http://purl.org/dc/terms/'),
    #   }
    #
    # @param [Hash{Symbol => RDF::URI}] prefixes
    # @return [Hash{Symbol => RDF::URI}]
    # @since 0.3.0
    def prefixes=(prefixes)
      @options[:prefixes] = prefixes
    end

    ##
    # Defines the given named URI prefix for this parser.
    #
    # @example Defining a URI prefix
    #   parser.prefix :dc, RDF::URI('http://purl.org/dc/terms/')
    #
    # @example Returning a URI prefix
    #   parser.prefix(:dc) #=> RDF::URI('http://purl.org/dc/terms/')
    #
    # @overload prefix(name, uri)
    #   @param [Symbol, #to_s] name
    #   @param [RDF::URI, #to_s] uri
    #
    # @overload prefix(name)
    #   @param [Symbol, #to_s] name
    #
    # @return [RDF::URI]
    def prefix(name, uri = nil)
      name = name.to_s.empty? ? nil : (name.respond_to?(:to_sym) ? name.to_sym : name.to_s.to_sym)
      uri.nil? ? prefixes[name] : prefixes[name] = uri
    end

    ##
    # Returns the Base URI defined for the parser,
    # as specified or when parsing a BASE prologue element.
    #
    # @example
    #   parser.base #=> RDF::URI('http://example.com/')
    #
    # @return [HRDF::URI]
    def base_uri
      @options[:base_uri]
    end

    ##
    # Set the Base URI to use for this parser.
    #
    # @param [RDF::URI, #to_s] uri
    #
    # @example
    #   parser.base_uri = RDF::URI('http://purl.org/dc/terms/')
    #
    # @return [RDF::URI]
    def base_uri=(uri)
      @options[:base_uri] = RDF::URI(uri)
    end

    ##
    # Returns `true` if parsed statements and values should be validated.
    #
    # @return [Boolean] `true` or `false`
    # @since 0.3.0
    def validate?
      @options[:validate]
    end

    private

    # Handlers used to define actions for each productions.
    # If a context is defined, create a producation data element and add to the @prod_data stack
    # If entries are defined, pass production data to :start and/or :finish handlers
    def contexts(production)
      case production
      when :Query
        # [1] Query ::= Prologue ( SelectQuery | ConstructQuery | DescribeQuery | AskQuery )
        {
          :finish => lambda { |data| finalize_query(data) }
        }
      when :Prologue
        # [2] Prologue ::= BaseDecl? PrefixDecl*
        {
          :finish => lambda { |data|
            unless options[:resolve_uris]
              # Only output if we're not resolving URIs internally
              add_prod_datum(:BaseDecl, data[:BaseDecl])
              add_prod_data(:PrefixDecl, data[:PrefixDecl]) if data[:PrefixDecl]
            end
          }
        }
      when :BaseDecl
        # [3] BaseDecl ::= 'BASE' IRI_REF
        {
          :finish => lambda { |data|
            self.base_uri = uri(data[:iri].last)
            add_prod_datum(:BaseDecl, data[:iri].last) unless options[:resolve_uris]
          }
        }
      when :PrefixDecl
        # [4] PrefixDecl := 'PREFIX' PNAME_NS IRI_REF";
        {
          :finish => lambda { |data|
            if data[:iri]
              self.prefix(data[:prefix], data[:iri].last)
              add_prod_data(:PrefixDecl, data[:iri].unshift("#{data[:prefix]}:".to_sym))
            end
          }
        }
      when :SelectQuery
        # [5] SelectQuery ::= 'SELECT' ( 'DISTINCT' | 'REDUCED' )? ( Var+ | '*' ) DatasetClause* WhereClause SolutionModifier
        {
          :finish => lambda { |data|
            query = merge_modifiers(data)
            add_prod_datum(:query, query)
          }
        }
      when :ConstructQuery
        # [6] ConstructQuery ::= 'CONSTRUCT' ConstructTemplate DatasetClause* WhereClause SolutionModifier
        {
          :finish => lambda { |data|
            query = merge_modifiers(data)
            template = data[:ConstructTemplate] || []

            add_prod_datum(:query, Algebra::Expression[:construct, template, query])
          }
        }
      when :DescribeQuery
        # [7] DescribeQuery ::= 'DESCRIBE' ( VarOrIRIref+ | '*' ) DatasetClause* WhereClause? SolutionModifier
        {
          :finish => lambda { |data|
            query = merge_modifiers(data)
            to_describe = data[:VarOrIRIref] || []
            query = Algebra::Expression[:describe, to_describe, query]
            add_prod_datum(:query, query)
          }
        }
      when :AskQuery
        # [8] AskQuery ::= 'ASK' DatasetClause* WhereClause
        {
          :finish => lambda { |data|
            query = merge_modifiers(data)
            add_prod_datum(:query, Algebra::Expression[:ask, query])
          }
        }
      when :DefaultGraphClause
        # [10] DefaultGraphClause ::= SourceSelector
        {
          :finish => lambda { |data|
            add_prod_datum(:dataset, data[:IRIref])
          }
        }
      when :NamedGraphClause
        # [11] NamedGraphClause ::= 'NAMED' SourceSelector
        {
          :finish => lambda { |data|
            add_prod_data(:dataset, data[:IRIref].unshift(:named))
          }
        }
      when :SolutionModifier
        # [14] SolutionModifier ::= OrderClause? LimitOffsetClauses?
        {
          :finish => lambda { |data|
            add_prod_datum(:order, data[:order])
            add_prod_datum(:slice, data[:slice])
          }
        }
      when :LimitOffsetClauses
        # [15] LimitOffsetClauses ::= ( LimitClause OffsetClause? | OffsetClause LimitClause? )
        {
          :finish => lambda { |data|
            return unless data[:limit] || data[:offset]
            limit = data[:limit] ? data[:limit].last : :_
            offset = data[:offset] ? data[:offset].last : :_
            add_prod_data(:slice, offset, limit)
          }
        }
      when :OrderClause
        # [16] OrderClause ::= 'ORDER' 'BY' OrderCondition+
        {
          :finish => lambda { |data|
            # Output 2puls of order conditions from left to right
            res = data[:OrderCondition]
            if res = data[:OrderCondition]
              res = [res] if [:asc, :desc].include?(res[0]) # Special case when there's only one condition and it's ASC (x) or DESC (x)
              add_prod_data(:order, res)
            end
          }
        }
      when :OrderCondition
        # [17] OrderCondition ::= ( ( 'ASC' | 'DESC' ) BrackettedExpression ) | ( Constraint | Var )
        {
          :finish => lambda { |data|
            if data[:OrderDirection]
              add_prod_datum(:OrderCondition, Algebra::Expression.for(data[:OrderDirection] + data[:Expression]))
            else
              add_prod_datum(:OrderCondition, data[:Constraint] || data[:Var])
            end
          }
        }
      when :LimitClause
        # [18] LimitClause ::= 'LIMIT' INTEGER
        {
          :finish => lambda { |data| add_prod_datum(:limit, data[:literal]) }
        }
      when :OffsetClause
        # [19] OffsetClause ::= 'OFFSET' INTEGER
        {
          :finish => lambda { |data| add_prod_datum(:offset, data[:literal]) }
        }
      when :GroupGraphPattern
        # [20] GroupGraphPattern ::= '{' TriplesBlock? ( ( GraphPatternNotTriples | Filter ) '.'? TriplesBlock? )* '}'
        {
          :finish => lambda { |data|
            query_list = data[:query_list]
            debug("GroupGraphPattern") {"ql #{query_list.to_a.inspect}"}
            debug("GroupGraphPattern") {"q #{data[:query] ? data[:query].first.inspect : 'nil'}"}

            if query_list
              lhs = data[:query].to_a.first
              while !query_list.empty?
                rhs = query_list.shift
                # Make the right-hand-side a Join with only a single operand, if it's not already and Operator
                rhs = Algebra::Expression.for(:join, :placeholder, rhs) unless rhs.is_a?(Algebra::Operator)
                debug("GroupGraphPattern(itr)") {"<= q: #{rhs.inspect}"}
                debug("GroupGraphPattern(itr)") {"<= lhs: #{lhs ? lhs.inspect : 'nil'}"}
                lhs ||= Algebra::Operator::BGP.new if rhs.is_a?(Algebra::Operator::LeftJoin)
                if lhs
                  if rhs.operand(0) == :placeholder
                    rhs.operands[0] = lhs
                  else
                    rhs = Algebra::Operator::Join.new(lhs, rhs)
                  end
                end
                lhs = rhs
                lhs = lhs.operand(1) if lhs.operand(0) == :placeholder
                debug("GroupGraphPattern(itr)") {"=> lhs: #{lhs.inspect}"}
              end
              # Trivial simplification for :join or :union of one query
              case lhs
              when Algebra::Operator::Join, Algebra::Operator::Union
                if lhs.operand(0) == :placeholder
                  lhs = lhs.operand(1)
                  debug("GroupGraphPattern(simplify)") {"=> lhs: #{lhs.inspect}"}
                end
              end
              res = lhs
            elsif data[:query]
              res = data[:query].first
            end

            debug("GroupGraphPattern(pre-filter)") {"res: #{res.inspect}"}

            if data[:filter]
              expr, query = flatten_filter(data[:filter])
              query = res || Algebra::Operator::BGP.new
              # query should be nil
              res = Algebra::Operator::Filter.new(expr, query)
            end
            add_prod_datum(:query, res)
          }
        }
      when :_GraphPatternNotTriples_or_Filter_Dot_Opt_TriplesBlock_Opt
        # Create a stack of GroupQuerys having a single graph element and resolve in GroupGraphPattern
        {
          :finish => lambda { |data|
            lhs = data[:_GraphPatternNotTriples_or_Filter]
            rhs = data[:query]
            add_prod_datum(:query_list, lhs) if lhs
            rhs = Algebra::Expression.for(:join, :placeholder, rhs.first) if rhs
            add_prod_data(:query_list, rhs) if rhs
            add_prod_datum(:filter, data[:filter])
          }
        }
      when :_GraphPatternNotTriples_or_Filter
        # Create a stack of Single operand Operators and resolve in GroupGraphPattern
        {
          :finish => lambda { |data|
            add_prod_datum(:filter, data[:filter])

            if data[:query]
              res = data[:query].to_a.first
              res = Algebra::Expression.for(:join, :placeholder, res) unless res.is_a?(Algebra::Operator)
              add_prod_data(:_GraphPatternNotTriples_or_Filter, res)
            end
          }
        }
      when :TriplesBlock
        # [21] TriplesBlock ::= TriplesSameSubject ( '.' TriplesBlock? )?
        {
          :finish => lambda { |data|
            query = Algebra::Operator::BGP.new
            data[:pattern].each {|p| query << p}

            # Append triples from ('.' TriplesBlock? )?
            data[:query].to_a.each {|q| query += q}
            add_prod_datum(:query, query)
          }
        }
      when :OptionalGraphPattern
        # [23] OptionalGraphPattern ::= 'OPTIONAL' GroupGraphPattern
        {
          :finish => lambda { |data|
            if data[:query]
              expr = nil
              query = data[:query].first
              if query.is_a?(Algebra::Operator::Filter)
                # Change to expression on left-join with query element
                expr, query = query.operands
                add_prod_data(:query, Algebra::Expression.for(:leftjoin, :placeholder, query, expr))
              else
                add_prod_data(:query, Algebra::Expression.for(:leftjoin, :placeholder, query))
              end
            end
          }
        }
      when :GraphGraphPattern
        # [24] GraphGraphPattern ::= 'GRAPH' VarOrIRIref GroupGraphPattern
        {
          :finish => lambda { |data|
            if data[:query]
              context = (data[:VarOrIRIref]).last
              bgp = data[:query].first
              if context
                add_prod_data(:query, Algebra::Expression.for(:graph, context, bgp))
              else
                add_prod_data(:query, bgp)
              end
            end
          }
        }
      when :GroupOrUnionGraphPattern
        # [25] GroupOrUnionGraphPattern ::= GroupGraphPattern ( 'UNION' GroupGraphPattern )*
        {
          :finish => lambda { |data|
            # Iterate through expression to create binary operations
            res = data[:query].to_a.first
            if data[:union]
              while !data[:union].empty?
                # Join union patterns together as Union operators
                #puts "res: res: #{res}, input_prod: #{input_prod}, data[:union]: #{data[:union].first}"
                lhs = res
                rhs = data[:union].shift
                res = Algebra::Expression.for(:union, lhs, rhs)
              end
            end
            add_prod_datum(:query, res)
          }
        }
      when :_UNION_GroupGraphPattern_Star
        {
          :finish => lambda { |data|
            # Add [:union rhs] to stack based on ":union"
            add_prod_data(:union, data[:query].to_a.first)
            add_prod_data(:union, data[:union].first) if data[:union]
          }
        }
      when :Filter
        # [26] Filter ::= 'FILTER' Constraint
        {
          :finish => lambda { |data| add_prod_datum(:filter, data[:Constraint]) }
        }
      when :Constraint
        # [27] Constraint ::= BrackettedExpression | BuiltInCall | FunctionCall
        {
          :finish => lambda { |data|
            if data[:Expression]
              # Resolve expression to the point it is either an atom or an s-exp
              res = data[:Expression].to_a.first
              add_prod_data(:Constraint, data[:Expression].to_a.first)
            elsif data[:BuiltInCall]
              add_prod_datum(:Constraint, data[:BuiltInCall])
            elsif data[:Function]
              add_prod_datum(:Constraint, data[:Function])
            end
          }
        }
      when :FunctionCall
        # [28] FunctionCall ::= IRIref ArgList
        {
          :finish => lambda { |data| add_prod_data(:Function, data[:IRIref] + data[:ArgList]) }
        }
      when :ArgList
        # [29] ArgList ::= ( NIL | '(' Expression ( ',' Expression )* ')' )
        {
          :finish => lambda { |data| data.values.each {|v| add_prod_datum(:ArgList, v)} }
        }
      when :ConstructTemplate
        # [30] ConstructTemplate ::= '{' ConstructTriples? '}'
        {
          :start => lambda { |data| @nd_var_gen = false}, # Generate BNodes instead of non-distinguished variables
          :finish => lambda { |data|
            @nd_var_gen = "0"
            add_prod_datum(:ConstructTemplate, data[:pattern])
            add_prod_datum(:ConstructTemplate, data[:ConstructTemplate])
          }
        }
      when :TriplesSameSubject
        # [32] TriplesSameSubject ::= VarOrTerm PropertyListNotEmpty | TriplesNode PropertyList
        {
          :finish => lambda { |data| add_prod_datum(:pattern, data[:pattern]) }
        }
      when :PropertyListNotEmpty
        # [33] PropertyListNotEmpty ::= Verb ObjectList ( ';' ( Verb ObjectList )? )*
        {
          :start => lambda {|data|
            subject = prod_data[:VarOrTerm] || prod_data[:TriplesNode] || prod_data[:GraphNode]
            error(nil, "Expected VarOrTerm or TriplesNode or GraphNode", :production => :PropertyListNotEmpty) if validate? && !subject
            data[:Subject] = subject
          },
          :finish => lambda {|data| add_prod_datum(:pattern, data[:pattern])}
        }
      when :ObjectList
        # [35] ObjectList ::= Object ( ',' Object )*
        {
          :start => lambda { |data|
            # Called after Verb. The prod_data stack should have Subject and Verb elements
            data[:Subject] = prod_data[:Subject]
            error(nil, "Expected Subject", :production => :ObjectList) if validate?
            error(nil, "Expected Verb", :production => :ObjectList) if validate?
            data[:Subject] = prod_data[:Subject]
            data[:Verb] = prod_data[:Verb].to_a.last
          },
          :finish => lambda { |data| add_prod_datum(:pattern, data[:pattern]) }
        }
      when :Object
        # [36] Object ::= GraphNode
        {
          :finish => lambda { |data|
            object = data[:VarOrTerm] || data[:TriplesNode] || data[:GraphNode]
            if object
              add_pattern(:Object, :subject => prod_data[:Subject], :predicate => prod_data[:Verb], :object => object)
              add_prod_datum(:pattern, data[:pattern])
            end
          }
        }
      when :Verb
        # [37] Verb ::= VarOrIRIref | 'a'
        {
          :finish => lambda { |data| data.values.each {|v| add_prod_datum(:Verb, v)} }
        }
      when :TriplesNode
        # [38] TriplesNode ::= Collection | BlankNodePropertyList
        #
        # Allocate Blank Node for () or []
        {
          :start => lambda { |data| data[:TriplesNode] = gen_node() },
          :finish => lambda { |data|
            add_prod_datum(:pattern, data[:pattern])
            add_prod_datum(:TriplesNode, data[:TriplesNode])
          }
        }
      when :Collection
        # [40] Collection ::= '(' GraphNode+ ')'
        {
          :start => lambda { |data| data[:Collection] = prod_data[:TriplesNode]},
          :finish => lambda { |data| expand_collection(data) }
        }
      when :GraphNode
        # [41] GraphNode ::= VarOrTerm | TriplesNode
        {
          :finish => lambda { |data|
            term = data[:VarOrTerm] || data[:TriplesNode]
            add_prod_datum(:pattern, data[:pattern])
            add_prod_datum(:GraphNode, term)
          }
        }
      when :VarOrTerm
        # [42] VarOrTerm ::= Var | GraphTerm
        {
          :finish => lambda { |data| data.values.each {|v| add_prod_datum(:VarOrTerm, v)} }
        }
      when :VarOrIRIref
        # [43] VarOrIRIref ::= Var | IRIref
        {
          :finish => lambda { |data| data.values.each {|v| add_prod_datum(:VarOrIRIref, v)} }
        }
      when :GraphTerm
        # [45] GraphTerm ::= IRIref | RDFLiteral | NumericLiteral | BooleanLiteral | BlankNode | NIL
        {
          :finish => lambda { |data|
            add_prod_datum(:GraphTerm, data[:IRIref] || data[:literal] || data[:BlankNode] || data[:NIL])
          }
        }
      when :Expression
        # [46] Expression ::= ConditionalOrExpression
        {
          :finish => lambda { |data| add_prod_datum(:Expression, data[:Expression]) }
        }
      when :ConditionalOrExpression
        # [47] ConditionalOrExpression ::= ConditionalAndExpression ( '||' ConditionalAndExpression )*
        {
          :finish => lambda { |data| add_operator_expressions(:_OR, data) }
        }
      when :_OR_ConditionalAndExpression
        # This part handles the operator and the rhs of a ConditionalAndExpression
        {
          :finish => lambda { |data| accumulate_operator_expressions(:ConditionalOrExpression, :_OR, data) }
        }
      when :ConditionalAndExpression
        # [48] ConditionalAndExpression ::= ValueLogical ( '&&' ValueLogical )*
        {
          :finish => lambda { |data| add_operator_expressions(:_AND, data) }
        }
      when :_AND_ValueLogical_Star
        # This part handles the operator and the rhs of a ConditionalAndExpression
        {
          :finish => lambda { |data| accumulate_operator_expressions(:ConditionalAndExpression, :_AND, data) }
        }
      when :RelationalExpression
        # [50] RelationalExpression ::= NumericExpression (
        #                                 '=' NumericExpression
        #                               | '!=' NumericExpression
        #                               | '<' NumericExpression
        #                               | '>' NumericExpression
        #                               | '<=' NumericExpression
        #                               | '>=' NumericExpression )?
        #
        {
          :finish => lambda { |data|
            if data[:_Compare_Numeric]
              add_prod_datum(:Expression, Algebra::Expression.for(data[:_Compare_Numeric].insert(1, *data[:Expression])))
            else
              # NumericExpression with no comparitor
              add_prod_datum(:Expression, data[:Expression])
            end
          }
        }
      when :_Compare_NumericExpression_Opt # ( '=' NumericExpression | '!=' NumericExpression | ... )?
        # This part handles the operator and the rhs of a RelationalExpression
        {
          :finish => lambda { |data|
            if data[:RelationalExpression]
              add_prod_datum(:_Compare_Numeric, data[:RelationalExpression] + data[:Expression])
            end
          }
        }
      when :AdditiveExpression
        # [52] AdditiveExpression ::= MultiplicativeExpression ( '+' MultiplicativeExpression | '-' MultiplicativeExpression )*
        {
          :finish => lambda { |data| add_operator_expressions(:_Add_Sub, data) }
        }
      when :_Add_Sub_MultiplicativeExpression_Star # ( '+' MultiplicativeExpression | '-' MultiplicativeExpression | ... )*
        # This part handles the operator and the rhs of a AdditiveExpression
        {
          :finish => lambda { |data| accumulate_operator_expressions(:AdditiveExpression, :_Add_Sub, data) }
        }
      when :MultiplicativeExpression
        # [53] MultiplicativeExpression ::= UnaryExpression ( '*' UnaryExpression | '/' UnaryExpression )*
        {
          :finish => lambda { |data| add_operator_expressions(:_Mul_Div, data) }
        }
      when :_Mul_Div_UnaryExpression_Star # ( '*' UnaryExpression | '/' UnaryExpression )*
        # This part handles the operator and the rhs of a MultiplicativeExpression
        {
          # Mul or Div with prod_data[:Expression]
          :finish => lambda { |data| accumulate_operator_expressions(:MultiplicativeExpression, :_Mul_Div, data) }
        }
      when :UnaryExpression
        # [54] UnaryExpression ::= '!' PrimaryExpression | '+' PrimaryExpression | '-' PrimaryExpression | PrimaryExpression
        {
          :finish => lambda { |data|
            case data[:UnaryExpression]
            when [:"!"]
              add_prod_datum(:Expression, Algebra::Expression[:not, data[:Expression].first])
            when [:"-"]
              e = data[:Expression].first
              if e.is_a?(RDF::Literal::Numeric)
                add_prod_datum(:Expression, -e) # Simple optimization to match ARQ generation
              else
                add_prod_datum(:Expression, Algebra::Expression[:minus, e])
              end
            else
              add_prod_datum(:Expression, data[:Expression])
            end
          }
        }
      when :PrimaryExpression
        # [55] PrimaryExpression ::= BrackettedExpression | BuiltInCall | IRIrefOrFunction | RDFLiteral | NumericLiteral | BooleanLiteral | Var
        {
          :finish => lambda { |data|
            if data[:Expression]
              add_prod_datum(:Expression, data[:Expression])
            elsif data[:BuiltInCall]
              add_prod_datum(:Expression, data[:BuiltInCall])
            elsif data[:IRIref]
              add_prod_datum(:Expression, data[:IRIref])
            elsif data[:Function]
              add_prod_datum(:Expression, data[:Function]) # Maintain array representation
            elsif data[:literal]
              add_prod_datum(:Expression, data[:literal])
            elsif data[:Var]
              add_prod_datum(:Expression, data[:Var])
            end

            add_prod_datum(:UnaryExpression, data[:UnaryExpression]) # Keep track of this for parent UnaryExpression production
          }
        }
      when :BuiltInCall
        # [57] BuiltInCall ::= 'STR' '(' Expression ')'
        #                    | 'LANG' '(' Expression ')'
        #                    | 'LANGMATCHES' '(' Expression ',' Expression ')'
        #                    | 'DATATYPE' '(' Expression ')'
        #                    | 'BOUND' '(' Var ')'
        #                    | 'sameTerm' '(' Expression ',' Expression ')'
        #                    | 'isIRI' '(' Expression ')'
        #                    | 'isURI' '(' Expression ')'
        #                    | 'isBLANK' '(' Expression ')'
        #                    | 'isLITERAL' '(' Expression ')'
        #                    | RegexExpression
        {
          :finish => lambda { |data|
            if data[:regex]
              add_prod_datum(:BuiltInCall, Algebra::Expression.for(data[:regex].unshift(:regex)))
            elsif data[:BOUND]
              add_prod_datum(:BuiltInCall, Algebra::Expression.for(data[:Var].unshift(:bound)))
            elsif data[:BuiltInCall]
              add_prod_datum(:BuiltInCall, Algebra::Expression.for(data[:BuiltInCall] + data[:Expression]))
            end
          }
        }
      when :RegexExpression
        # [58] RegexExpression ::= 'REGEX' '(' Expression ',' Expression ( ',' Expression )? ')'
        {
          :finish => lambda { |data| add_prod_datum(:regex, data[:Expression]) }
        }
      when :IRIrefOrFunction
        # [59] IRIrefOrFunction ::= IRIref ArgList?
        {
          :finish => lambda { |data|
            if data.has_key?(:ArgList)
              # Function is (func arg1 arg2 ...)
              add_prod_data(:Function, data[:IRIref] + data[:ArgList])
            else
              add_prod_datum(:IRIref, data[:IRIref])
            end
          }
        }
      when :RDFLiteral
        # [60] RDFLiteral ::= String ( LANGTAG | ( '^^' IRIref ) )?
        {
          :finish => lambda { |data|
            if data[:string]
              lit = data.dup
              str = lit.delete(:string).last
              lit[:datatype] = lit.delete(:IRIref).last if lit[:IRIref]
              lit[:language] = lit.delete(:language).last.downcase if lit[:language]
              add_prod_datum(:literal, RDF::Literal.new(str, lit)) if str
            end
          }
        }
      when :NumericLiteralPositive
        # [63] NumericLiteralPositive ::= INTEGER_POSITIVE | DECIMAL_POSITIVE | DOUBLE_POSITIVE
        {
          :finish => lambda { |data|
            num = data.values.flatten.last
            add_prod_datum(:literal, num.class.new("+#{num.value}"))
            add_prod_datum(:UnaryExpression, data[:UnaryExpression]) # Keep track of this for parent UnaryExpression production
          }
        }
      when :NumericLiteralNegative
        # [64] NumericLiteralNegative ::= INTEGER_NEGATIVE | DECIMAL_NEGATIVE | DOUBLE_NEGATIVE
        {
          :finish => lambda { |data|
            num = data.values.flatten.last
            add_prod_datum(:literal, num.class.new("-#{num.value}"))
            add_prod_datum(:UnaryExpression, data[:UnaryExpression]) # Keep track of this for parent UnaryExpression production
          }
        }
      when :IRIref
        # [67] IRIref ::= IRI_REF | PrefixedName
        {
          :finish => lambda { |data| add_prod_datum(:IRIref, data[:iri]) }
        }
      when :PrefixedName
        # [68] PrefixedName ::= PNAME_LN | PNAME_NS
        {
          :finish => lambda { |data| add_prod_datum(:iri, data[:PrefixedName]) }
        }
      end
    end

    # Start for production
    def onStart(prod)
      context = contexts(prod.to_sym)
      @productions << prod
      if context
        # Create a new production data element, potentially allowing handler to customize before pushing on the @prod_data stack
        progress("#{prod}(:start):#{@prod_data.length}", prod_data)
        data = {}
        context[:start].call(data) if context.has_key?(:start)
        @prod_data << data
      else
        progress("#{prod}(:start)", '')
      end
      #puts @prod_data.inspect
    end

    # Finish of production
    def onFinish
      prod = @productions.pop()
      context = contexts(prod.to_sym)
      if context
        # Pop production data element from stack, potentially allowing handler to use it
        data = @prod_data.pop
        context[:finish].call(data) if context.has_key?(:finish)
        progress("#{prod}(:finish):#{@prod_data.length}", prod_data, :depth => (@productions.length + 1))
      else
        progress("#{prod}(:finish)", '', :depth => (@productions.length + 1))
      end
    end

    # Handlers for individual tokens based on production
    def token_productions(parent_production, production)
      case parent_production
      when :_Add_Sub_MultiplicativeExpression_Star
        case production
        when :"+", :"-"
          lambda { |token| add_prod_datum(:AdditiveExpression, production) }
        end
      when :UnaryExpression
        case production
        when :"!", :"+", :"-"
          lambda { |token| add_prod_datum(:UnaryExpression, production) }
        end
      when :NumericLiteralPositive, :NumericLiteralNegative, :NumericLiteral
        case production
        when :"+", :"-"
          lambda { |token| add_prod_datum(:NumericLiteral, production) }
        end
      else
        # Generic tokens that don't depend on a particular production
        case production
        when :a
          lambda { |token| add_prod_datum(:Verb, RDF_TYPE) }
        when :ANON
          lambda { |token| add_prod_datum(:BlankNode, gen_node()) }
        when :ASC, :DESC
          lambda { |token| add_prod_datum(:OrderDirection, token.downcase.to_sym) }
        when :BLANK_NODE_LABEL
          lambda { |token| add_prod_datum(:BlankNode, gen_node(token)) }
        when :BooleanLiteral
          lambda { |token|
            add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.boolean))
          }
        when :BOUND
          lambda { |token| add_prod_datum(:BOUND, :bound) }
        when :DATATYPE
          lambda { |token| add_prod_datum(:BuiltInCall, :datatype) }
        when :DECIMAL
          lambda { |token| add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.decimal)) }
        when :DISTINCT, :REDUCED
          lambda { |token| add_prod_datum(:DISTINCT_REDUCED, token.downcase.to_sym) }
        when :DOUBLE
          lambda { |token| add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.double)) }
        when :INTEGER
          lambda { |token| add_prod_datum(:literal, RDF::Literal.new(token, :datatype => RDF::XSD.integer)) }
        when :IRI_REF
          lambda { |token| add_prod_datum(:iri, uri(token)) }
        when :ISBLANK
          lambda { |token| add_prod_datum(:BuiltInCall, :isBLANK) }
        when :ISLITERAL
          lambda { |token| add_prod_datum(:BuiltInCall, :isLITERAL) }
        when :ISIRI
          lambda { |token| add_prod_datum(:BuiltInCall, :isIRI) }
        when :ISURI
          lambda { |token| add_prod_datum(:BuiltInCall, :isURI) }
        when :LANG
          lambda { |token| add_prod_datum(:BuiltInCall, :lang) }
        when :LANGMATCHES
          lambda { |token| add_prod_datum(:BuiltInCall, :langMatches) }
        when :LANGTAG
          lambda { |token| add_prod_datum(:language, token) }
        when :NIL
          lambda { |token| add_prod_datum(:NIL, RDF["nil"]) }
        when :PNAME_LN
          lambda { |token| add_prod_datum(:PrefixedName, ns(*token)) }
        when :PNAME_NS
          lambda { |token|
            add_prod_datum(:PrefixedName, ns(token, nil)) # [68] PrefixedName ::= PNAME_LN | PNAME_NS
            prod_data[:prefix] = token && token.to_sym # [4] PrefixDecl := 'PREFIX' PNAME_NS IRI_REF";
          }
        when :STR
          lambda { |token| add_prod_datum(:BuiltInCall, :str) }
        when :SAMETERM
          lambda { |token| add_prod_datum(:BuiltInCall, :sameTerm) }
        when :STRING_LITERAL1, :STRING_LITERAL2, :STRING_LITERAL_LONG1, :STRING_LITERAL_LONG2
          lambda { |token| add_prod_datum(:string, token) }
        when :VAR1, :VAR2 # [44] Var ::= VAR1 | VAR2
          lambda { |token| add_prod_datum(:Var, variable(token, true)) }
        when :"*", :"/"
          lambda { |token| add_prod_datum(:MultiplicativeExpression, production) }
        when :"=", :"!=", :"<", :">", :"<=", :">="
          lambda { |token| add_prod_datum(:RelationalExpression, production) }
        when :"&&"
          lambda { |token| add_prod_datum(:ConditionalAndExpression, production) }
        when :"||"
          lambda { |token| add_prod_datum(:ConditionalOrExpression, production) }
        end
      end
    end

    # A token
    def onToken(prod, token)
      unless @productions.empty?
        parentProd = @productions.last
        token_production = token_productions(parentProd.to_sym, prod.to_sym)
        if token_production
          token_production.call(token)
          progress("#{prod}<#{parentProd}(:token)", "#{token}: #{prod_data}", :depth => (@productions.length + 1))
        else
          progress("#{prod}<#{parentProd}(:token)", token, :depth => (@productions.length + 1))
        end
      else
        error("#{parentProd}(:token)", "Token has no parent production", :production => prod)
      end
    end

    # Current ProdData element
    def prod_data; @prod_data.last; end

    # @param [String] str Error string
    # @param [Hash] options
    # @option options [URI, #to_s] :production
    # @option options [Token] :token
    def error(node, message, options = {})
      depth = options[:depth] || @productions.length
      node ||= options[:production]
      raise Error.new("Error on production #{options[:production].inspect}#{' with input ' + options[:token].inspect if options[:token]} at line #{@lineno}: #{message}", options)
    end

    ##
    # Progress output when parsing
    # @param [String] str
    def progress(node, message, options = {})
      depth = options[:depth] || @productions.length
      $stderr.puts("[#{@lineno}]#{' ' * depth}#{node}: #{message}") if @options[:progress]
    end

    ##
    # Progress output when debugging
    #
    # @overload: May be called with node, message and an option hash
    #   @param [String] node processing node
    #   @param [String] message
    #   @param [Hash{Symbol => Object}] options
    #   @option options [Boolean] :debug output debug messages to $stderr
    #   @option options [Integer] :depth (@productions.length)
    #     Processing depth for indenting message output.
    #   @yieldreturn [String] appended to message, to allow for lazy-evaulation of message
    #
    # @overload: May be called with node and an option hash
    #   @param [String] node processing node
    #   @param [Hash{Symbol => Object}] options
    #   @option options [Boolean] :debug output debug messages to $stderr
    #   @option options [Integer] :depth (@productions.length)
    #     Processing depth for indenting message output.
    #   @yieldreturn [String] appended to message, to allow for lazy-evaulation of message
    #
    # @overload: May be called with only options, in which case the block is used to return the output message
    #   @param [String] node processing node
    #   @param [Hash{Symbol => Object}] options
    #   @option options [Boolean] :debug output debug messages to $stderr
    #   @option options [Integer] :depth (@productions.length)
    #     Processing depth for indenting message output.
    #   @yieldreturn [String] appended to message, to allow for lazy-evaulation of message
    def debug(*args)
      options = args.last.is_a?(Hash) ? args.pop : @options
      return unless options[:debug]
      message = args.join(": ")
      message = message + yield if block_given?
      depth = options[:depth] || @productions.length
      $stderr.puts("[#{@lineno}]#{' ' * depth}#{message}") if options[:debug]
    end

    # [1] Query ::= Prologue ( SelectQuery | ConstructQuery | DescribeQuery | AskQuery )
    #
    # Generate an S-Exp for the final query
    # Inputs are :BaseDecl, :PrefixDecl, and :query
    def finalize_query(data)
      return unless data[:query]

      query = data[:query].first

      query = Algebra::Expression[:prefix, data[:PrefixDecl].first, query] if data[:PrefixDecl]
      query = Algebra::Expression[:base, data[:BaseDecl].first, query] if data[:BaseDecl]
      add_prod_datum(:query, query)
    end

    # [40] Collection ::= '(' GraphNode+ ')'
    #
    # Take collection of objects and create RDF Collection using rdf:first, rdf:rest and rdf:nil
    # @param [Hash] data Production Data
    def expand_collection(data)
      # Add any triples generated from deeper productions
      add_prod_datum(:pattern, data[:pattern])

      # Create list items for each element in data[:GraphNode]
      first = col = data[:Collection]
      list = data[:GraphNode].to_a.flatten.compact
      last = list.pop

      list.each do |r|
        add_pattern(:Collection, :subject => first, :predicate => RDF["first"], :object => r)
        rest = gen_node()
        add_pattern(:Collection, :subject => first, :predicate => RDF["rest"], :object => rest)
        first = rest
      end

      if last
        add_pattern(:Collection, :subject => first, :predicate => RDF["first"], :object => last)
      end
      add_pattern(:Collection, :subject => first, :predicate => RDF["rest"], :object => RDF["nil"])
    end

    # Class method version to aid in specs
    def self.variable(id, distinguished = true)
      Parser.new.send(:variable, id, distinguished)
    end

    def abbr(prodURI)
      prodURI.to_s.split('#').last
    end

    ##
    # @param [Symbol, String] type_or_value
    # @return [Token]
    def accept(type_or_value)
      if (token = tokens.first) && token === type_or_value
        tokens.shift
      end
    end

    ##
    # @return [void]
    def fail
      false
    end
    alias_method :fail!, :fail

    # Flatten a Data in form of :filter => [op+ bgp?], without a query into filter and query creating exprlist, if necessary
    # @return [Array[:expr, query]]
    def flatten_filter(data)
      query = data.pop if data.last.respond_to?(:execute)
      expr = data.length > 1 ? Algebra::Operator::Exprlist.new(*data) : data.first
      [expr, query]
    end

    # Merge query modifiers, datasets, and projections
    def merge_modifiers(data)
      query = data[:query] ? data[:query].first : Algebra::Operator::BGP.new

      # Add datasets and modifiers in order
      query = Algebra::Expression[:order, data[:order].first, query] if data[:order]

      query = Algebra::Expression[:project, data[:Var], query] if data[:Var] # project

      query = Algebra::Expression[data[:DISTINCT_REDUCED].first, query] if data[:DISTINCT_REDUCED]

      query = Algebra::Expression[:slice, data[:slice][0], data[:slice][1], query] if data[:slice]

      query = Algebra::Expression[:dataset, data[:dataset], query] if data[:dataset]

      query
    end

    # Add joined expressions in for prod1 (op prod2)* to form (op (op 1 2) 3)
    def add_operator_expressions(production, data)
      # Iterate through expression to create binary operations
      res = data[:Expression]
      while data[production] && !data[production].empty?
        res = Algebra::Expression[data[production].shift + res + data[production].shift]
      end
      add_prod_datum(:Expression, res)
    end

    # Accumulate joined expressions in for prod1 (op prod2)* to form (op (op 1 2) 3)
    def accumulate_operator_expressions(operator, production, data)
      if data[operator]
        # Add [op data] to stack based on "production"
        add_prod_datum(production, [data[operator], data[:Expression]])
        # Add previous [op data] information
        add_prod_datum(production, data[production])
      else
        # No operator, forward :Expression
        add_prod_datum(:Expression, data[:Expression])
      end
    end

    # Add a single value to prod_data, allows for values to be an array
    def add_prod_datum(sym, values)
      case values
      when Array
        prod_data[sym] ||= []
        debug("add_prod_datum(#{sym})") {"#{prod_data[sym].inspect} += #{values.inspect}"}
        prod_data[sym] += values
      when nil
        return
      else
        prod_data[sym] ||= []
        debug("add_prod_datum(#{sym})") {"#{prod_data[sym].inspect} << #{values.inspect}"}
        prod_data[sym] << values
      end
    end

    # Add values to production data, values aranged as an array
    def add_prod_data(sym, *values)
      return if values.compact.empty?

      prod_data[sym] ||= []
      prod_data[sym] += values
      debug("add_prod_data(#{sym})") {"#{prod_data[sym].inspect} += #{values.inspect}"}
    end

    # Generate a BNode identifier
    def gen_node(id = nil)
      if @nd_var_gen
        # Use non-distinguished variables within patterns
        variable(id, false)
      else
        unless id
          id = @options[:anon_base]
          @options[:anon_base] = @options[:anon_base].succ
        end
        RDF::Node.new(id)
      end
    end

    ##
    # Return variable allocated to an ID.
    # If no ID is provided, a new variable
    # is allocated. Otherwise, any previous assignment will be used.
    #
    # The variable has a #distinguished? method applied depending on if this
    # is a disinguished or non-distinguished variable. Non-distinguished
    # variables are effectively the same as BNodes.
    # @return [RDF::Query::Variable]
    def variable(id, distinguished = true)
      id = nil if id.to_s.empty?

      if id
        @vars[id] ||= begin
          v = RDF::Query::Variable.new(id)
          v.distinguished = distinguished
          v
        end
      else
        unless distinguished
          # Allocate a non-distinguished variable identifier
          id = @nd_var_gen
          @nd_var_gen = id.succ
        end
        v = RDF::Query::Variable.new(id)
        v.distinguished = distinguished
        v
      end
    end

    # Create URIs
    def uri(value)
      # If we have a base URI, use that when constructing a new URI
      uri = if self.base_uri
        u = self.base_uri.join(value.to_s)
        u.lexical = "<#{value}>" unless u.to_s == value.to_s || options[:resolve_uris]
        u
      else
        RDF::URI(value)
      end

      #uri.validate! if validate? && uri.respond_to?(:validate)
      #uri.canonicalize! if canonicalize?
      #uri = RDF::URI.intern(uri) if intern?
      uri
    end

    def ns(prefix, suffix)
      base = prefix(prefix).to_s
      suffix = suffix.to_s.sub(/^\#/, "") if base.index("#")
      debug {"ns(#{prefix.inspect}): base: '#{base}', suffix: '#{suffix}'"}
      uri = uri(base + suffix.to_s)
      # Cause URI to be serialized as a lexical
      uri.lexical = "#{prefix}:#{suffix}" unless options[:resolve_uris]
      uri
    end

    # add a pattern
    #
    # @param [String] production:: Production generating pattern
    # @param [RDF::Term] subject:: the subject of the pattern
    # @param [RDF::Term] predicate:: the predicate of the pattern
    # @param [RDF::Term, Node, Literal] object:: the object of the pattern
    def add_pattern(production, options)
      progress(production, "add_pattern: #{options.inspect}")
      progress(production, "[:pattern, #{options[:subject]}, #{options[:predicate]}, #{options[:object]}]")
      triple = {}
      options.each_pair do |r, v|
        if v.is_a?(Array) && v.flatten.length == 1
          v = v.flatten.first
        end
        if validate? && !v.is_a?(RDF::Term)
          error("add_pattern", "Expected #{r} to be a resource, but it was #{v.inspect}",
            :production => production)
        end
        triple[r] = v
      end
      add_prod_datum(:pattern, RDF::Query::Pattern.new(triple))
    end

    instance_methods.each { |method| public method } # DEBUG

    public
    ##
    # Raised for errors during parsing.
    #
    # @example Raising a parser error
    #   raise SPARQL::Grammar::Parser::Error.new(
    #     "FIXME on line 10",
    #     :input => query, :production => '%', :lineno => 9)
    #
    # @see http://ruby-doc.org/core/classes/StandardError.html
    class Error < StandardError
      ##
      # The input string associated with the error.
      #
      # @return [String]
      attr_reader :input

      ##
      # The grammar production where the error was found.
      #
      # @return [String]
      attr_reader :production

      ##
      # The line number where the error occurred.
      #
      # @return [Integer]
      attr_reader :lineno

      ##
      # Position within line of error.
      #
      # @return [Integer]
      attr_reader :position

      ##
      # Initializes a new lexer error instance.
      #
      # @param [String, #to_s] message
      # @param [Hash{Symbol => Object}] options
      # @option options [String] :input (nil)
      # @option options [String] :production (nil)
      # @option options [Integer] :lineno (nil)
      # @option options [Integer] :position (nil)
      def initialize(message, options = {})
        @input = options[:input]
        @production = options[:production]
        @lineno = options[:lineno]
        @position = options[:position]
        super(message.to_s)
      end
    end # class Error
  end # class Parser
end; end # module SPARQL::Grammar
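For orientation, the snippet below is a minimal usage sketch of the parser class added in this release, based only on the API visible in the diff above (Parser#initialize, #parse, and #to_s, which serializes the parsed algebra via #to_sxp). The require path and the exact serialized output are assumptions, not verified against the released gem.

    require 'sparql/grammar'   # assumed entry point; the gem also ships lib/sparql.rb

    # Parse a SPARQL query string into the gem's SSE (S-Expression) algebra form.
    parser = SPARQL::Grammar::Parser.new("PREFIX ex: <http://example/> SELECT * WHERE { ?s ex:p ?o }")
    parser.parse               # runs the table-driven parse starting at the Query production
    puts parser.to_s           # prints the resulting algebra as an S-Expression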