antlr3 1.8.0 → 1.8.2
- data/History.txt +35 -0
- data/Manifest.txt +73 -0
- data/README.txt +6 -13
- data/java/RubyTarget.java +43 -19
- data/java/antlr-full-3.2.1.jar +0 -0
- data/lib/antlr3/debug.rb +2 -0
- data/lib/antlr3/debug/event-hub.rb +55 -55
- data/lib/antlr3/debug/record-event-listener.rb +2 -2
- data/lib/antlr3/debug/rule-tracer.rb +14 -14
- data/lib/antlr3/debug/socket.rb +47 -47
- data/lib/antlr3/debug/trace-event-listener.rb +8 -8
- data/lib/antlr3/main.rb +29 -9
- data/lib/antlr3/modes/ast-builder.rb +7 -7
- data/lib/antlr3/modes/filter.rb +19 -17
- data/lib/antlr3/profile.rb +34 -6
- data/lib/antlr3/recognizers.rb +50 -1
- data/lib/antlr3/streams.rb +19 -15
- data/lib/antlr3/streams/rewrite.rb +241 -229
- data/lib/antlr3/template/group-file-lexer.rb +6 -8
- data/lib/antlr3/template/group-file-parser.rb +16 -16
- data/lib/antlr3/template/group-file.rb +1 -1
- data/lib/antlr3/test/call-stack.rb +13 -13
- data/lib/antlr3/test/core-extensions.rb +69 -69
- data/lib/antlr3/test/functional.rb +0 -4
- data/lib/antlr3/test/grammar.rb +70 -70
- data/lib/antlr3/token.rb +41 -17
- data/lib/antlr3/tree.rb +11 -14
- data/lib/antlr3/tree/debug.rb +53 -53
- data/lib/antlr3/tree/visitor.rb +11 -11
- data/lib/antlr3/tree/wizard.rb +35 -35
- data/lib/antlr3/util.rb +18 -0
- data/lib/antlr3/version.rb +1 -1
- data/rakefile +1 -0
- data/samples/ANTLRv3Grammar.g +3 -3
- data/samples/JavaScript.g +702 -0
- data/samples/standard/C/C.g +543 -0
- data/samples/standard/C/C.tokens +175 -0
- data/samples/standard/C/C__testrig.st +0 -0
- data/samples/standard/C/c.rb +12 -0
- data/samples/standard/C/input +3479 -0
- data/samples/standard/C/output +171 -0
- data/samples/standard/LL-star/LLStar.g +101 -0
- data/samples/standard/LL-star/input +12 -0
- data/samples/standard/LL-star/ll-star.rb +12 -0
- data/samples/standard/LL-star/output +2 -0
- data/samples/standard/calc/Calculator.g +47 -0
- data/samples/standard/calc/Calculator.py +16 -0
- data/samples/standard/calc/Calculator.rb +28 -0
- data/samples/standard/cminus/CMinus.g +141 -0
- data/samples/standard/cminus/bytecode.group +80 -0
- data/samples/standard/cminus/cminus.rb +16 -0
- data/samples/standard/cminus/input +9 -0
- data/samples/standard/cminus/java.group +91 -0
- data/samples/standard/cminus/output +11 -0
- data/samples/standard/cminus/python.group +48 -0
- data/samples/standard/dynamic-scope/DynamicScopes.g +50 -0
- data/samples/standard/dynamic-scope/dynamic-scopes.rb +12 -0
- data/samples/standard/dynamic-scope/input +7 -0
- data/samples/standard/dynamic-scope/output +4 -0
- data/samples/standard/fuzzy/FuzzyJava.g +89 -0
- data/samples/standard/fuzzy/fuzzy.py +11 -0
- data/samples/standard/fuzzy/fuzzy.rb +9 -0
- data/samples/standard/fuzzy/input +13 -0
- data/samples/standard/fuzzy/output +12 -0
- data/samples/standard/hoisted-predicates/HoistedPredicates.g +40 -0
- data/samples/standard/hoisted-predicates/hoisted-predicates.rb +13 -0
- data/samples/standard/hoisted-predicates/input +1 -0
- data/samples/standard/hoisted-predicates/output +1 -0
- data/samples/standard/island-grammar/Javadoc.g +46 -0
- data/samples/standard/island-grammar/Simple.g +104 -0
- data/samples/standard/island-grammar/input +11 -0
- data/samples/standard/island-grammar/island.rb +12 -0
- data/samples/standard/island-grammar/output +16 -0
- data/samples/standard/java/Java.g +827 -0
- data/samples/standard/java/input +80 -0
- data/samples/standard/java/java.rb +13 -0
- data/samples/standard/java/output +1 -0
- data/samples/standard/python/Python.g +718 -0
- data/samples/standard/python/PythonTokenSource.rb +107 -0
- data/samples/standard/python/input +210 -0
- data/samples/standard/python/output +24 -0
- data/samples/standard/python/python.rb +14 -0
- data/samples/standard/rakefile +18 -0
- data/samples/standard/scopes/SymbolTable.g +66 -0
- data/samples/standard/scopes/input +12 -0
- data/samples/standard/scopes/output +3 -0
- data/samples/standard/scopes/scopes.rb +12 -0
- data/samples/standard/simplecTreeParser/SimpleC.g +113 -0
- data/samples/standard/simplecTreeParser/SimpleCWalker.g +64 -0
- data/samples/standard/simplecTreeParser/input +12 -0
- data/samples/standard/simplecTreeParser/output +1 -0
- data/samples/standard/simplecTreeParser/simplec.rb +18 -0
- data/samples/standard/treeparser/Lang.g +24 -0
- data/samples/standard/treeparser/LangDumpDecl.g +17 -0
- data/samples/standard/treeparser/input +1 -0
- data/samples/standard/treeparser/output +2 -0
- data/samples/standard/treeparser/treeparser.rb +18 -0
- data/samples/standard/tweak/Tweak.g +68 -0
- data/samples/standard/tweak/input +9 -0
- data/samples/standard/tweak/output +16 -0
- data/samples/standard/tweak/tweak.rb +13 -0
- data/samples/standard/xml/README +16 -0
- data/samples/standard/xml/XML.g +123 -0
- data/samples/standard/xml/input +21 -0
- data/samples/standard/xml/output +39 -0
- data/samples/standard/xml/xml.rb +9 -0
- data/templates/Ruby.stg +4 -4
- data/test/functional/ast-output/auto-ast.rb +0 -5
- data/test/functional/ast-output/rewrites.rb +4 -4
- data/test/unit/test-scope.rb +45 -0
- metadata +96 -8
data/lib/antlr3/test/grammar.rb
CHANGED
@@ -24,17 +24,17 @@ module DependantFile
     @dependencies ||= GLOBAL_DEPENDENCIES.clone
   end
 
-  def depends_on(path)
+  def depends_on( path )
     path = File.expand_path path.to_s
-    dependencies << path if test(?f, path)
+    dependencies << path if test( ?f, path )
     return path
   end
 
   def stale?
-    force and return(true)
+    force and return( true )
     target_files.any? do |target|
-      not test(?f, target) or
-        dependencies.any? { |dep| test(?>, dep, target) }
+      not test( ?f, target ) or
+        dependencies.any? { |dep| test( ?>, dep, target ) }
     end
   end
 end # module DependantFile
@@ -43,35 +43,35 @@ class Grammar
   include DependantFile
 
   GRAMMAR_TYPES = %w(lexer parser tree combined)
-  TYPE_TO_CLASS = {
+  TYPE_TO_CLASS = {
     'lexer' => 'Lexer',
     'parser' => 'Parser',
     'tree' => 'TreeParser'
   }
   CLASS_TO_TYPE = TYPE_TO_CLASS.invert
 
-  def self.global_dependency(path)
+  def self.global_dependency( path )
     path = File.expand_path path.to_s
-    GLOBAL_DEPENDENCIES << path if test(?f, path)
+    GLOBAL_DEPENDENCIES << path if test( ?f, path )
     return path
   end
 
-  def self.inline(source, *args)
-    InlineGrammar.new(source, *args)
+  def self.inline( source, *args )
+    InlineGrammar.new( source, *args )
   end
 
   ##################################################################
   ######## CONSTRUCTOR #############################################
   ##################################################################
-  def initialize(path, options = {})
+  def initialize( path, options = {} )
     @path = path.to_s
     @source = File.read( @path )
-    @output_directory = options.fetch(:output_directory, '.')
+    @output_directory = options.fetch( :output_directory, '.' )
     @verbose = options.fetch( :verbose, $VERBOSE )
     study
     build_dependencies
 
-    yield(self) if block_given?
+    yield( self ) if block_given?
   end
 
   ##################################################################
@@ -87,9 +87,9 @@ class Grammar
   def lexer_file_name
     if lexer? then base = name
     elsif combined? then base = name + 'Lexer'
-    else return(nil)
+    else return( nil )
     end
-    return(base + '.rb')
+    return( base + '.rb' )
   end
 
   def parser_class_name
@@ -99,9 +99,9 @@ class Grammar
   def parser_file_name
     if parser? then base = name
     elsif combined? then base = name + 'Parser'
-    else return(nil)
+    else return( nil )
     end
-    return(base + '.rb')
+    return( base + '.rb' )
   end
 
   def tree_parser_class_name
@@ -138,11 +138,11 @@ class Grammar
     @type == "combined"
   end
 
-  def target_files(include_imports = true)
+  def target_files( include_imports = true )
     targets = []
 
     for target_type in %w(lexer parser tree_parser)
-      target_name = self.send(:"#{target_type}_file_name") and
+      target_name = self.send( :"#{ target_type }_file_name" ) and
         targets.push( output_directory / target_name )
     end
 
@@ -151,37 +151,37 @@ class Grammar
   end
 
   def imports
-    @source.scan(/^\s*import\s+(\w+)\s*;/).
+    @source.scan( /^\s*import\s+(\w+)\s*;/ ).
       tap { |list| list.flatten! }
   end
 
   def imported_target_files
     imports.map! do |delegate|
-      output_directory / "#{@name}_#{delegate}.rb"
+      output_directory / "#{ @name }_#{ delegate }.rb"
     end
   end
 
   ##################################################################
   ##### COMMAND METHODS ############################################
   ##################################################################
-  def compile(options = {})
-    if options[:force] or stale?
-      compile!(options)
+  def compile( options = {} )
+    if options[ :force ] or stale?
+      compile!( options )
     end
   end
 
-  def compile!(options = {})
-    command = build_command(options)
+  def compile!( options = {} )
+    command = build_command( options )
 
     blab( command )
-    output = IO.popen(command) do |pipe|
+    output = IO.popen( command ) do |pipe|
       pipe.read
     end
 
     case status = $?.exitstatus
     when 0, 130
-      post_compile(options)
-    else compilation_failure!(command, status, output)
+      post_compile( options )
+    else compilation_failure!( command, status, output )
     end
 
     return target_files
@@ -190,8 +190,8 @@ class Grammar
   def clean!
     deleted = []
     for target in target_files
-      if test(?f, target)
-        File.delete(target)
+      if test( ?f, target )
+        File.delete( target )
         deleted << target
       end
     end
@@ -204,7 +204,7 @@ class Grammar
 
 private
 
-  def post_compile(options)
+  def post_compile( options )
     # do nothing for now
   end
 
@@ -216,50 +216,50 @@ private
     ENV[ 'ANTLR_JAR' ] || ANTLR3.antlr_jar
   end
 
-  def compilation_failure!(command, status, output)
+  def compilation_failure!( command, status, output )
    for f in target_files
-      test(?f, f) and File.delete(f)
+      test( ?f, f ) and File.delete( f )
    end
-    raise CompilationFailure.new(self, command, status, output)
+    raise CompilationFailure.new( self, command, status, output )
  end
 
  def build_dependencies
-    depends_on(@path)
+    depends_on( @path )
 
    if @source =~ /tokenVocab\s*=\s*(\S+)\s*;/
      foreign_grammar_name = $1
      token_file = output_directory / foreign_grammar_name + '.tokens'
      grammar_file = File.dirname( path ) / foreign_grammar_name << '.g'
-      depends_on(token_file)
-      depends_on(grammar_file)
+      depends_on( token_file )
+      depends_on( grammar_file )
    end
  end
 
-  def shell_escape(token)
+  def shell_escape( token )
    token = token.to_s.dup
    token.empty? and return "''"
-    token.gsub!(/([^A-Za-z0-9_\-.,:\/@\n])/n, '\\\1')
-    token.gsub!(/\n/, "'\n'")
+    token.gsub!( /([^A-Za-z0-9_\-.,:\/@\n])/n, '\\\1' )
+    token.gsub!( /\n/, "'\n'" )
    return token
  end
 
-  def build_command(options)
+  def build_command( options )
    parts = %w(java)
    jar_path = options.fetch( :antlr_jar, default_antlr_jar )
-    parts.push('-cp', jar_path)
+    parts.push( '-cp', jar_path )
    parts << 'org.antlr.Tool'
-    parts.push('-fo', output_directory)
-    options[:profile] and parts << '-profile'
-    options[:debug] and parts << '-debug'
-    options[:trace] and parts << '-trace'
-    options[:debug_st] and parts << '-XdbgST'
-    parts << File.expand_path(@path)
-    parts.map! { |part| shell_escape(part) }.join(' ') << ' 2>&1'
+    parts.push( '-fo', output_directory )
+    options[ :profile ] and parts << '-profile'
+    options[ :debug ] and parts << '-debug'
+    options[ :trace ] and parts << '-trace'
+    options[ :debug_st ] and parts << '-XdbgST'
+    parts << File.expand_path( @path )
+    parts.map! { |part| shell_escape( part ) }.join( ' ' ) << ' 2>&1'
  end
 
  def study
    @source =~ /^\s*(lexer|parser|tree)?\s*grammar\s*(\S+)\s*;/ or
-      raise Grammar::FormatError[source, path]
+      raise Grammar::FormatError[ source, path ]
    @name = $2
    @type = $1 || 'combined'
  end
@@ -271,9 +271,9 @@ class Grammar::InlineGrammar < Grammar
  def initialize( source, options = {} )
    host = call_stack.find { |call| call.file != __FILE__ }
 
-    @host_file = File.expand_path(options[:file] || host.file)
-    @host_line = (options[:line] || host.line)
-    @output_directory = options.fetch(:output_directory, File.dirname(@host_file))
+    @host_file = File.expand_path( options[ :file ] || host.file )
+    @host_line = ( options[ :line ] || host.line )
+    @output_directory = options.fetch( :output_directory, File.dirname( @host_file ) )
    @verbose = options.fetch( :verbose, $VERBOSE )
 
    @source = source.to_s.fixed_indent( 0 )
@@ -291,7 +291,7 @@ class Grammar::InlineGrammar < Grammar
    File.basename( @host_file )
  end
 
-  def path=(v)
+  def path=( v )
    previous, @path = @path, v.to_s
    previous == @path or write_to_disk
  end
@@ -304,9 +304,9 @@ private
 
  def write_to_disk
    @path ||= output_directory / @name + '.g'
-    test(?d, output_directory) or Dir.mkdir( output_directory )
-    unless test(?f, @path) and MD5.digest(@source) == MD5.digest(File.read(@path))
-      open(@path, 'w') { |f| f.write(@source) }
+    test( ?d, output_directory ) or Dir.mkdir( output_directory )
+    unless test( ?f, @path ) and MD5.digest( @source ) == MD5.digest( File.read( @path ) )
+      open( @path, 'w' ) { |f| f.write( @source ) }
    end
  end
end # class Grammar::InlineGrammar
@@ -315,12 +315,12 @@ class Grammar::CompilationFailure < StandardError
  JAVA_TRACE = /^(org\.)?antlr\.\S+\(\S+\.java:\d+\)\s*/
  attr_reader :grammar, :command, :status, :output
 
-  def initialize(grammar, command, status, output)
+  def initialize( grammar, command, status, output )
    @command = command
    @status = status
    @output = output.gsub( JAVA_TRACE, '' )
 
-    message = <<-END.here_indent! % [command, status, grammar, @output]
+    message = <<-END.here_indent! % [ command, status, grammar, @output ]
    | command ``%s'' failed with status %s
    | %p
    | ~ ~ ~ command output ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
@@ -334,32 +334,32 @@ end # error Grammar::CompilationFailure
class Grammar::FormatError < StandardError
  attr_reader :file, :source
 
-  def self.[](*args)
-    new(*args)
+  def self.[]( *args )
+    new( *args )
  end
 
-  def initialize(source, file = nil)
+  def initialize( source, file = nil )
    @file = file
    @source = source
    message = ''
    if file.nil? # inline
      message << "bad inline grammar source:\n"
-      message << ("-" * 80) << "\n"
+      message << ( "-" * 80 ) << "\n"
      message << @source
-      message[-1] == ?\n or message << "\n"
-      message << ("-" * 80) << "\n"
+      message[ -1 ] == ?\n or message << "\n"
+      message << ( "-" * 80 ) << "\n"
      message << "could not locate a grammar name and type declaration matching\n"
      message << "/^\s*(lexer|parser|tree)?\s*grammar\s*(\S+)\s*;/"
    else
      message << 'bad grammar source in file %p' % @file
-      message << ("-" * 80) << "\n"
+      message << ( "-" * 80 ) << "\n"
      message << @source
-      message[-1] == ?\n or message << "\n"
-      message << ("-" * 80) << "\n"
+      message[ -1 ] == ?\n or message << "\n"
+      message << ( "-" * 80 ) << "\n"
      message << "could not locate a grammar name and type declaration matching\n"
      message << "/^\s*(lexer|parser|tree)?\s*grammar\s*(\S+)\s*;/"
    end
-    super(message)
+    super( message )
  end
end # error Grammar::FormatError
 
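The grammar.rb changes are mostly a spacing-style pass over the test suite's Grammar helper. A minimal sketch of how that helper is typically driven, assuming the ANTLR3::Test namespace, the tmp output directory, and a working java/ANTLR jar setup (none of which are part of this changeset):

# sketch only -- namespace, grammar text, and output directory are assumptions
require 'antlr3/test/grammar'

grammar = ANTLR3::Test::Grammar.inline( <<-'END', :output_directory => 'tmp' )
  grammar Numbers;
  number : DIGIT+ ;
  DIGIT  : '0' .. '9' ;
END

begin
  grammar.compile( :debug => false )    # shells out to org.antlr.Tool when stale
  p grammar.target_files                # e.g. generated NumbersLexer.rb / NumbersParser.rb
rescue ANTLR3::Test::Grammar::CompilationFailure => error
  warn( error.message )                 # tool output with java stack frames stripped
ensure
  grammar.clean!                        # delete whatever targets were generated
end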
data/lib/antlr3/token.rb
CHANGED
@@ -101,16 +101,32 @@ module Token
   alias :token_index :index
   alias :token_index= :index=
 
+  #
+  # The match operator has been implemented to match against several different
+  # attributes of a token for convenience in quick scripts
+  #
+  # @example Match against an integer token type constant
+  #   token =~ VARIABLE_NAME  => true/false
+  # @example Match against a token type name as a Symbol
+  #   token =~ :FLOAT  => true/false
+  # @example Match the token text against a Regular Expression
+  #   token =~ /^@[a-z_]\w*$/i
+  # @example Compare the token's text to a string
+  #   token =~ "class"
+  #
   def =~ obj
     case obj
     when Integer then type == obj
-    when Symbol then name
+    when Symbol then name == obj.to_s
     when Regexp then obj =~ text
     when String then text == obj
     else super
     end
   end
 
+  #
+  # Tokens are comparable by their stream index values
+  #
   def <=> tk2
     index <=> tk2.index
   end
@@ -139,6 +155,10 @@ module Token
     token_name( type )
   end
 
+  def source_name
+    i = input and i.source_name
+  end
+
   def hidden?
     channel == HIDDEN_CHANNEL
   end
@@ -147,28 +167,19 @@ module Token
     concrete? ? input.substring( start, stop ) : text
   end
 
+  #
+  # Sets the token's channel value to HIDDEN_CHANNEL
+  #
   def hide!
     self.channel = HIDDEN_CHANNEL
   end
 
-  def range
-    start..stop rescue nil
-  end
-
-  def to_i
-    index.to_i
-  end
-
-  def to_s
-    text.to_s
-  end
-
   def inspect
-    text_inspect = text ?
-    text_position = line
-    stream_position = start ?
+    text_inspect = text ? "[#{ text.inspect }] " : ' '
+    text_position = line > 0 ? "@ line #{ line } col #{ column } " : ''
+    stream_position = start ? "(#{ range.inspect })" : ''
 
-    front = index
+    front = index >= 0 ? "#{ index } " : ''
     rep = front << name << text_inspect <<
       text_position << stream_position
     rep.strip!
@@ -180,6 +191,18 @@ module Token
     printer.text( inspect )
   end
 
+  def range
+    start..stop rescue nil
+  end
+
+  def to_i
+    index.to_i
+  end
+
+  def to_s
+    text.to_s
+  end
+
 private
 
   def token_name( type )
@@ -526,6 +549,7 @@ class TokenScheme < ::Module
     mod.extend( self )
   end
   private :included
+
   attr_reader :unused, :types
 
   def define_tokens( token_map = {} )
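The token.rb changes document the Token match operator, make Symbol matching compare against the token type name, add source_name, and relocate range / to_i / to_s. A brief usage sketch, assuming a hypothetical generated lexer named Numbers::Lexer that can be enumerated token by token; only the Token methods shown in the hunks above are taken from the diff:

# sketch only -- Numbers::Lexer and its token names are hypothetical
require 'antlr3'

lexer = Numbers::Lexer.new( "12 + 30" )
lexer.each do |token|
  next if token.hidden?                        # channel == HIDDEN_CHANNEL
  token =~ :DIGITS     and puts( "number #{ token }" )  # Symbol -> type-name comparison (fixed here)
  token =~ /^[-+*\/]$/ and puts( "operator" )           # Regexp -> matched against token text
  p token.range                                # start..stop, or nil when positions are unknown
end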
data/lib/antlr3/tree.rb
CHANGED
@@ -355,8 +355,6 @@ class BaseTree < ::Array
   alias add_children concat
   alias each_child each
 
-
-
   def set_child( index, tree )
     return if tree.nil?
     tree.flat_list? and raise ArgumentError, "Can't set single child to a list"
@@ -443,9 +441,6 @@ class BaseTree < ::Array
 
   def root?() @parent.nil? end
   alias leaf? empty?
-
-
-
 end
 
 
@@ -828,6 +823,7 @@ builds and manipulates CommonTree nodes.
 =end
 
 class CommonTreeAdaptor
+  extend ClassMacros
   include TreeAdaptor
   include ANTLR3::Constants
 
@@ -892,16 +888,17 @@ class CommonTreeAdaptor
     end
   end
 
-
+  creation_methods = %w(
+    create_from_token create_from_type
+    create_error_node create_with_payload
+    create
+  )
 
-
-
-
-
-
-  alias create_error_node! create_error_node
-  alias create_with_payload! create_with_payload
-  alias create! create
+  for method_name in creation_methods
+    bang_method = method_name + '!'
+    alias_method( bang_method, method_name )
+    deprecate( bang_method, "use method ##{ method_name } instead" )
+  end
 
   def rule_post_processing( root )
     if root and root.flat_list?