antlr3 1.6.0 → 1.6.3

data/lib/antlr3/task.rb CHANGED
@@ -61,7 +61,7 @@ class CompileTask < Rake::TaskLib
 
  def grammar_set( *grammar_files )
  grammar_files = [ grammar_files ].flatten!
- options = @options.merge(
+ options = @options.merge(
  Hash === grammar_files.last ? grammar_files.pop : {}
  )
  set = GrammarSet.new( grammar_files, options )
@@ -71,18 +71,18 @@ class CompileTask < Rake::TaskLib
  end
 
  def compile_task
- full_name = ( @namespace + [ @name, 'compile' ] ).join(':')
+ full_name = ( @namespace + [ @name, 'compile' ] ).join( ':' )
  Rake::Task[ full_name ]
  end
 
  def clobber_task
- full_name = ( @namespace + [ @name, 'clobber' ] ).join(':')
+ full_name = ( @namespace + [ @name, 'clobber' ] ).join( ':' )
  Rake::Task[ full_name ]
  end
 
  def define
  namespace( @name ) do
- desc( "trash all ANTLR-generated source code")
+ desc( "trash all ANTLR-generated source code" )
  task( 'clobber' ) do
  for set in @grammar_sets
  set.clean
@@ -129,12 +129,12 @@ class CompileTask::GrammarSet
  @compile_options =
  case opts = options[ :compile_options ]
  when Array then opts
- else Shellwords.shellsplit( opts.to_s )
+ else Shellwords.shellwords( opts.to_s )
  end
  @java_options =
  case opts = options[ :java_options ]
  when Array then opts
- else Shellwords.shellsplit( opts.to_s )
+ else Shellwords.shellwords( opts.to_s )
  end
  end
 
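Note: the switch from Shellwords.shellsplit to Shellwords.shellwords above looks like a portability fix; shellwords is the long-standing name of the tokenizer, while shellsplit is a newer alias that older Ruby interpreters do not define. A minimal sketch of the behavior both names share (the option string is an illustrative value):

    require 'shellwords'

    # Tokenize a compile-option string the way a POSIX shell would.
    Shellwords.shellwords( '-o "build dir" -trace' )
    # => ["-o", "build dir", "-trace"]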
@@ -201,24 +201,33 @@ class CompileTask::GrammarSet
  parts << '-trace' if @trace
  parts.concat( @compile_options )
  parts << grammar.path
- return Shellwords.shelljoin( parts )
+ return parts.map! { | t | escape( t ) }.join( ' ' )
  end
+
+ def escape( token )
+ token = token.to_s.dup
+ token.empty? and return( %('') )
+ token.gsub!( /([^A-Za-z0-9_\-.,:\/@\n])/n, "\\\\\\1" )
+ token.gsub!( /\n/, "'\n'" )
+ return( token )
+ end
+
  end
 
  class GrammarFile
- LANGUAGES = {
- "ActionScript" => [".as"],
- "CSharp2" => [".cs"],
- "C" => [".c", ".h"],
- "ObjC" => [".m", ".h"],
- "CSharp3" => [".cs"],
- "Cpp" => [".cpp", ".h"],
- "Ruby" => [".rb"],
- "Java" => [".java"],
- "JavaScript" => [".js"],
- "Python" => [".py"],
- "Delphi" => [".pas"],
- "Perl5" => [".pm"]
+ LANGUAGES = {
+ "ActionScript" => [ ".as" ],
+ "CSharp2" => [ ".cs" ],
+ "C" => [ ".c", ".h" ],
+ "ObjC" => [ ".m", ".h" ],
+ "CSharp3" => [ ".cs" ],
+ "Cpp" => [ ".cpp", ".h" ],
+ "Ruby" => [ ".rb" ],
+ "Java" => [ ".java" ],
+ "JavaScript" => [ ".js" ],
+ "Python" => [ ".py" ],
+ "Delphi" => [ ".pas" ],
+ "Perl5" => [ ".pm" ]
  }.freeze
  GRAMMAR_TYPES = %w(lexer parser tree combined)
 
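The new escape helper replaces Shellwords.shelljoin, presumably for the same compatibility reason, with hand-rolled backslash quoting of shell metacharacters. A standalone sketch of what the command-line assembly then produces, assuming a grammar path containing a space (in the gem itself escape is an instance method of CompileTask::GrammarSet):

    # Illustrative re-statement of the escaping logic shown in the hunk above.
    def escape( token )
      token = token.to_s.dup
      token.empty? and return( %('') )
      token.gsub!( /([^A-Za-z0-9_\-.,:\/@\n])/n, "\\\\\\1" )   # backslash-escape metacharacters
      token.gsub!( /\n/, "'\n'" )                              # wrap literal newlines in quotes
      token
    end

    parts = [ 'java', 'org.antlr.Tool', '-o', 'build dir', 'My Grammar.g' ]
    puts parts.map { | t | escape( t ) }.join( ' ' )
    # prints: java org.antlr.Tool -o build\ dir My\ Grammar.g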
@@ -305,7 +314,7 @@ class GrammarFile
  targets = [ tokens_file ]
 
  for target_type in %w( lexer parser tree_parser )
- for file in self.send( :"#{ target_type }_files")
+ for file in self.send( :"#{ target_type }_files" )
  targets << file
  end
  end
@@ -395,7 +404,7 @@ private
  @token_vocab = $1
  end
 
- @source.scan(/^\s*import\s+(\w+\s*(?:,\s*\w+\s*)*);/) do
+ @source.scan( /^\s*import\s+(\w+\s*(?:,\s*\w+\s*)*);/ ) do
  list = $1.strip
  @imports.concat( list.split( /\s*,\s*/ ) )
  end
@@ -435,33 +444,33 @@ end
  class GrammarFile::FormatError < StandardError
  attr_reader :file, :source
 
- def self.[](*args)
- new(*args)
+ def self.[]( *args )
+ new( *args )
  end
 
- def initialize(source, file = nil)
+ def initialize( source, file = nil )
  @file = file
  @source = source
  message = ''
  if file.nil? # inline
  message << "bad inline grammar source:\n"
- message << ("-" * 80) << "\n"
+ message << ( "-" * 80 ) << "\n"
  message << @source
- message[-1] == ?\n or message << "\n"
- message << ("-" * 80) << "\n"
+ message[ -1 ] == ?\n or message << "\n"
+ message << ( "-" * 80 ) << "\n"
  message << "could not locate a grammar name and type declaration matching\n"
  message << "/^\s*(lexer|parser|tree)?\s*grammar\s*(\S+)\s*;/"
  else
  message << 'bad grammar source in file %p' % @file
- message << ("-" * 80) << "\n"
+ message << ( "-" * 80 ) << "\n"
  message << @source
- message[-1] == ?\n or message << "\n"
- message << ("-" * 80) << "\n"
+ message[ -1 ] == ?\n or message << "\n"
+ message << ( "-" * 80 ) << "\n"
  message << "could not locate a grammar name and type declaration matching\n"
  message << "/^\s*(lexer|parser|tree)?\s*grammar\s*(\S+)\s*;/"
  end
- super(message)
+ super( message )
  end
  end # error Grammar::FormatError
  end # class CompileTask
- end # module ANTLR3
+ end # module ANTLR3
@@ -1,5 +1,5 @@
  #!/usr/bin/ruby
- # encoding: utf-8
+
 
  require 'erb'
  require 'antlr3'
@@ -237,7 +237,7 @@ class Context
 
  def method_missing( method, *args )
  case name = method.to_s
- when SETTER_FORM then return( self[ $1 ] = *args )
+ when SETTER_FORM then return( self[ $1 ] = args.first )
  when ATTR_FORM
  args.empty? and has_ivar?( name ) and return( self[ name ] )
  end
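The setter branch of method_missing now returns args.first instead of splatting args on the right-hand side of the assignment; the bare-splat form behaves differently across Ruby versions. A small illustration (the version behavior is noted in comments and has not been re-verified on every interpreter):

    args = [ 'value' ]
    # Ruby 1.8:   x = *args   # assigns "value"   (single element unwrapped)
    # Ruby 1.9+:  x = *args   # assigns ["value"] (always an array)
    x = args.first             # assigns "value" on either version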
@@ -787,7 +787,7 @@ class Group
  while true
  alt_11 = 2
  look_11_0 = @input.peek(1)
-
+
  if (look_11_0.between?(?\t, ?\n) || look_11_0.between?(?\f, ?\r) || look_11_0 == ?\s)
  alt_11 = 1
 
data/lib/antlr3/token.rb CHANGED
@@ -4,7 +4,7 @@
  =begin LICENSE
 
  [The "BSD licence"]
- Copyright (c) 2009 Kyle Yetter
+ Copyright (c) 2009-2010 Kyle Yetter
  All rights reserved.
 
  Redistribution and use in source and binary forms, with or without
@@ -115,7 +115,7 @@ module Token
  index <=> tk2.index
  end
 
- def initialize_copy(orig)
+ def initialize_copy( orig )
  self.index = -1
  self.type = orig.type
  self.channel = orig.channel
@@ -136,7 +136,7 @@ module Token
  end
 
  def name
- token_name(type)
+ token_name( type )
  end
 
  def hidden?
@@ -144,7 +144,7 @@ module Token
  end
 
  def source_text
- concrete? ? input.substring(start, stop) : text
+ concrete? ? input.substring( start, stop ) : text
  end
 
  def hide!
@@ -165,30 +165,30 @@ module Token
 
  def inspect
  text_inspect = text ? '[%p] ' % text : ' '
- text_position = line != 0 ? '@ line %s col %s ' % [line, column] : ''
- stream_position = start ? '(%s..%s)' % [start, stop] : ''
+ text_position = line != 0 ? '@ line %s col %s ' % [ line, column ] : ''
+ stream_position = start ? '(%s..%s)' % [ start, stop ] : ''
 
  front = index != -1 ? index.to_s << ' ' : ''
  rep = front << name << text_inspect <<
  text_position << stream_position
  rep.strip!
- channel == DEFAULT_CHANNEL or rep << " (#{channel.to_s})"
- return(rep)
+ channel == DEFAULT_CHANNEL or rep << " (#{ channel.to_s })"
+ return( rep )
  end
 
- def pretty_print(printer)
+ def pretty_print( printer )
  printer.text( inspect )
  end
 
  private
 
- def token_name(type)
- BUILT_IN_TOKEN_NAMES[type]
+ def token_name( type )
+ BUILT_IN_TOKEN_NAMES[ type ]
  end
  end
 
- CommonToken = Struct.new(:type, :channel, :text, :input, :start,
- :stop, :index, :line, :column)
+ CommonToken = Struct.new( :type, :channel, :text, :input, :start,
+ :stop, :index, :line, :column )
 
  =begin rdoc ANTLR3::CommonToken
 
@@ -219,36 +219,38 @@ Here is the token structure attribute list in order:
 
  class CommonToken
  include Token
- DEFAULT_VALUES = {
+ DEFAULT_VALUES = {
  :channel => DEFAULT_CHANNEL,
  :index => -1,
  :line => 0,
  :column => -1
  }.freeze
 
- def self.token_name(type)
- BUILT_IN_TOKEN_NAMES[type]
+ def self.token_name( type )
+ BUILT_IN_TOKEN_NAMES[ type ]
  end
 
- def self.create(fields = {})
- fields = DEFAULT_VALUES.merge(fields)
- args = members.map { |name| fields[name.to_sym] }
- new(*args)
+ def self.create( fields = {} )
+ fields = DEFAULT_VALUES.merge( fields )
+ args = members.map { |name| fields[ name.to_sym ] }
+ new( *args )
  end
 
  # allows you to make a copy of a token with a different class
- def self.from_token(token)
- new(token.type, token.channel, token.text ? token.text.clone : nil,
- token.input, token.start, token.stop, -1, token.line, token.column)
+ def self.from_token( token )
+ new(
+ token.type, token.channel, token.text ? token.text.clone : nil,
+ token.input, token.start, token.stop, -1, token.line, token.column
+ )
  end
 
- def initialize(type = nil, channel = DEFAULT_CHANNEL, text = nil,
+ def initialize( type = nil, channel = DEFAULT_CHANNEL, text = nil,
  input = nil, start = nil, stop = nil, index = -1,
- line = 0, column = -1)
+ line = 0, column = -1 )
  super
- block_given? and yield(self)
+ block_given? and yield( self )
  self.text.nil? && self.start && self.stop and
- self.text = self.input.substring(self.start, self.stop)
+ self.text = self.input.substring( self.start, self.stop )
  end
 
  alias :input_stream :input
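For context, the factory methods reformatted above are used roughly like this; a minimal sketch, with the token type value chosen arbitrarily:

    require 'antlr3'

    # Build a detached token from a field hash; anything not supplied
    # falls back to DEFAULT_VALUES (channel, index = -1, line = 0, column = -1).
    token = ANTLR3::CommonToken.create( :type => 10, :text => 'ident' )
    token.text    # => "ident"
    token.index   # => -1
    token.line    # => 0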
@@ -257,9 +259,15 @@ class CommonToken
  alias :token_index= :index=
  end
 
- Constants::EOF_TOKEN = CommonToken.new(EOF).freeze
- Constants::INVALID_TOKEN = CommonToken.new(INVALID_TOKEN_TYPE).freeze
- Constants::SKIP_TOKEN = CommonToken.new(INVALID_TOKEN_TYPE).freeze
+ module Constants
+
+ # End of File / End of Input character and token type
+ EOF_TOKEN = CommonToken.new( EOF ).freeze
+ INVALID_TOKEN = CommonToken.new( INVALID_TOKEN_TYPE ).freeze
+ SKIP_TOKEN = CommonToken.new( INVALID_TOKEN_TYPE ).freeze
+ end
+
+
 
 
  =begin rdoc ANTLR3::TokenSource
@@ -285,7 +293,7 @@ module TokenSource
  return token
  end
 
- def to_stream(options = {})
+ def to_stream( options = {} )
  if block_given?
  CommonTokenStream.new( self, options ) { | t, stream | yield( t, stream ) }
  else
@@ -294,7 +302,7 @@ module TokenSource
  end
 
  def each
- block_given? or return enum_for(:each)
+ block_given? or return enum_for( :each )
  loop { yield( self.next ) }
  rescue StopIteration
  return self
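The each method above returns an Enumerator when called without a block (via enum_for) and otherwise drains the source until StopIteration is raised. A hedged usage sketch, where MyLanguage::Lexer stands in for any ANTLR-generated lexer class:

    lexer = MyLanguage::Lexer.new( 'some input' )   # hypothetical generated lexer
    lexer.each { | token | p token }                # yields each token in turn
    lexer.to_stream                                 # wraps the source in a CommonTokenStream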
@@ -326,13 +334,13 @@ module TokenFactory
  end
  end
 
- def create_token(*args)
+ def create_token( *args )
  if block_given?
- token_class.new(*args) do |*targs|
- yield(*targs)
+ token_class.new( *args ) do |*targs|
+ yield( *targs )
  end
  else
- token_class.new(*args)
+ token_class.new( *args )
  end
  end
  end
@@ -467,9 +475,9 @@ dynamically-created CommonToken subclass.
  class TokenScheme < ::Module
  include TokenFactory
 
- def self.new(tk_class = nil, &body)
+ def self.new( tk_class = nil, &body )
  super() do
- tk_class ||= Class.new(::ANTLR3::CommonToken)
+ tk_class ||= Class.new( ::ANTLR3::CommonToken )
  self.token_class = tk_class
 
  const_set( :TOKEN_NAMES, ::ANTLR3::Constants::BUILT_IN_TOKEN_NAMES.clone )
@@ -484,14 +492,14 @@ class TokenScheme < ::Module
  begin
  token_names[ type ] or super
  rescue NoMethodError
- ::ANTLR3::CommonToken.token_name(type)
+ ::ANTLR3::CommonToken.token_name( type )
  end
  end
  module_function :token_name, :token_names
 
  include ANTLR3::Constants
 
- body and module_eval(&body)
+ body and module_eval( &body )
  end
  end
 
@@ -513,16 +521,16 @@ class TokenScheme < ::Module
  end
 
 
- def included(mod)
+ def included( mod )
  super
- mod.extend(self)
+ mod.extend( self )
  end
  private :included
  attr_reader :unused, :types
 
  def define_tokens( token_map = {} )
  for token_name, token_value in token_map
- define_token(token_name, token_value)
+ define_token( token_name, token_value )
  end
  return self
  end
@@ -535,9 +543,9 @@ class TokenScheme < ::Module
  # raise an error unless value is the same as the current value
  value ||= current_value
  unless current_value == value
- raise NameError.new(
- "new token type definition ``#{name} = #{value}'' conflicts " <<
- "with existing type definition ``#{name} = #{current_value}''", name
+ raise NameError.new(
+ "new token type definition ``#{ name } = #{ value }'' conflicts " <<
+ "with existing type definition ``#{ name } = #{ current_value }''", name
  )
  end
  else
@@ -556,58 +564,58 @@ class TokenScheme < ::Module
  return self
  end
 
- def register_names(*names)
+ def register_names( *names )
  if names.length == 1 and Hash === names.first
  names.first.each do |value, name|
- register_name(value, name)
+ register_name( value, name )
  end
  else
  names.each_with_index do |name, i|
  type_value = Constants::MIN_TOKEN_TYPE + i
- register_name(type_value, name)
+ register_name( type_value, name )
  end
  end
  end
 
  def register_name( type_value, name )
  name = name.to_s.freeze
- if token_names.has_key?(type_value)
- current_name = token_names[type_value]
+ if token_names.has_key?( type_value )
+ current_name = token_names[ type_value ]
  current_name == name and return name
 
  if current_name == "T__#{ type_value }"
  # only an anonymous name is registered -- upgrade the name to the full literal name
- token_names[type_value] = name
- elsif name == "T__#{type_value}"
+ token_names[ type_value ] = name
+ elsif name == "T__#{ type_value }"
  # ignore name downgrade from literal to anonymous constant
  return current_name
  else
- error = NameError.new(
- "attempted assignment of token type #{type_value}" <<
- " to name #{name} conflicts with existing name #{current_name}", name
+ error = NameError.new(
+ "attempted assignment of token type #{ type_value }" <<
+ " to name #{ name } conflicts with existing name #{ current_name }", name
  )
  raise error
  end
  else
- token_names[type_value] = name.to_s.freeze
+ token_names[ type_value ] = name.to_s.freeze
  end
  end
 
- def built_in_type?(type_value)
- Constants::BUILT_IN_TOKEN_NAMES.fetch(type_value, false) and true
+ def built_in_type?( type_value )
+ Constants::BUILT_IN_TOKEN_NAMES.fetch( type_value, false ) and true
  end
 
- def token_defined?(name_or_value)
+ def token_defined?( name_or_value )
  case value
- when Integer then token_names.has_key?(name_or_value)
- else const_defined?(name_or_value.to_s)
+ when Integer then token_names.has_key?( name_or_value )
+ else const_defined?( name_or_value.to_s )
  end
  end
 
- def [](name_or_value)
+ def []( name_or_value )
  case name_or_value
- when Integer then token_names.fetch(name_or_value, nil)
- else const_get(name_or_value.to_s) rescue token_names.index(name_or_value)
+ when Integer then token_names.fetch( name_or_value, nil )
+ else const_get( name_or_value.to_s ) rescue token_names.index( name_or_value )
  end
  end
 
@@ -615,10 +623,10 @@ class TokenScheme < ::Module
  self::Token
  end
 
- def token_class=(klass)
- Class === klass or raise(TypeError, "token_class must be a Class")
+ def token_class=( klass )
+ Class === klass or raise( TypeError, "token_class must be a Class" )
  Util.silence_warnings do
- klass < self or klass.send(:include, self)
+ klass < self or klass.send( :include, self )
  const_set( :Token, klass )
  end
  end
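A brief sketch of the writer shown above: it accepts only a Class, mixes the scheme into that class, and installs it as the scheme's Token constant. The class and variable names here are illustrative:

    class MyToken < ANTLR3::CommonToken; end

    scheme = ANTLR3::TokenScheme.new
    scheme.token_class = MyToken     # ok: MyToken now includes the scheme
    # scheme.token_class = 'nope'    # => TypeError: token_class must be a Class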