antlr3 1.2.4 → 1.3.0

@@ -8,17 +8,5 @@ $:.unshift( lib )
  require 'antlr3'
  
  jar = ANTLR3.antlr_jar or fail( "cannot find antlr4ruby's customized ANTLR jar" )
- escape = proc do | a |
-   if a.empty? then "''"
-   else
-     a.gsub( /([^A-Za-z0-9_\-.,:\/@\n])/n, '\\\1' ).
-       gsub( /\n/, "'\n'" )
-   end
- end
  
- command = %w(java -jar) << escape[ jar ]
- for arg in ARGV
-   command << escape[ arg ]
- end
-
- exec command.join(' ')
+ exec( 'java', '-jar', jar, *ARGV )
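
An aside on the launcher change above (illustration only, not part of the diff): joining a command into one string hands it to a shell, which is why the old script had to escape every argument; passing separate arguments to Kernel#exec skips the shell entirely. A minimal Ruby sketch of the two call styles, using a made-up jar path and assuming a java executable on PATH:

    require 'shellwords'

    jar = '/tmp/example/antlr.jar'   # placeholder path, not the gem's real layout

    # old style: one shell command string, so every argument needs escaping
    # exec( [ 'java', '-jar', jar, *ARGV ].map { |a| a.shellescape }.join( ' ' ) )

    # new style: pass the argument vector directly -- no shell, no escaping needed
    exec( 'java', '-jar', jar, *ARGV )
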
@@ -63,11 +63,12 @@ extends Target
  public String toString(Object o) {
      return o.toString();
  }
+
  public String toString(Object o, String formatName) {
      String idString = o.toString();
-
+
      if (idString.isEmpty()) return idString;
-
+
      if (formatName.equals("snakecase")) {
          return snakecase(idString);
      } else if (formatName.equals("camelcase")) {
@@ -393,132 +394,14 @@ extends Target
   */
  public String encodeIntAsCharEscape(final int v) {
      final int intValue;
-
+
      if (v == 65535) {
-         intValue = -1;
+         intValue = -1;
      } else {
-         intValue = v;
+         intValue = v;
      }
-
+
      return String.valueOf(intValue);
+     // return String.valueOf(v);
  }
- // public List postProcessAction(List chunks, antlr.Token actionToken) {
- //     List nChunks = new ArrayList();
- //
- //     for (int i = 0; i < chunks.size(); i++) {
- //         Object chunk = chunks.get(i);
- //
- //         if ( chunk instanceof String ) {
- //             String text = (String)chunks.get(i);
- //             if ( nChunks.size() == 0 && actionToken.getColumn() > 0 ) {
- //                 // first chunk and some 'virtual' WS at beginning
- //                 // prepend to this chunk
- //
- //                 String ws = "";
- //                 for ( int j = 0 ; j < actionToken.getColumn() ; j++ ) {
- //                     ws += " ";
- //                 }
- //                 text = ws + text;
- //             }
- //
- //             String[] parts = text.split("\r?\n");
- //             for ( String line : parts ) {
- //                 nChunks.add(line);
- //             }
- //         }
- //         else {
- //             if ( nChunks.size() == 0 && actionToken.getColumn() > 0 ) {
- //                 // first chunk and some 'virtual' WS at beginning
- //                 // add as a chunk of its own
- //
- //                 String ws = "";
- //                 for ( int j = 0 ; j < actionToken.getColumn() ; j++ ) {
- //                     ws += " ";
- //                 }
- //                 nChunks.add(ws);
- //             }
- //
- //             nChunks.add(chunk);
- //         }
- //     }
- //
- //     int lineNo = actionToken.getLine();
- //     int col = 0;
- //
- //     // strip trailing empty lines
- //     int lastChunk = nChunks.size() - 1;
- //     while ( lastChunk > 0
- //             && nChunks.get(lastChunk) instanceof String
- //             && ((String)nChunks.get(lastChunk)).trim().length() == 0 )
- //         lastChunk--;
- //
- //     // string leading empty lines
- //     int firstChunk = 0;
- //     while ( firstChunk <= lastChunk
- //             && nChunks.get(firstChunk) instanceof String
- //             && ((String)nChunks.get(firstChunk)).trim().length() == 0
- //             && ((String)nChunks.get(firstChunk)).endsWith("\n") ) {
- //         lineNo++;
- //         firstChunk++;
- //     }
- //
- //     int indent = -1;
- //     for ( int i = firstChunk ; i <= lastChunk ; i++ ) {
- //         Object chunk = nChunks.get(i);
- //
- //         //System.out.println(lineNo + ":" + col + " " + quote(chunk.toString()));
- //
- //         if ( chunk instanceof String ) {
- //             String text = (String)chunk;
- //
- //             if ( col == 0 ) {
- //                 if ( indent == -1 ) {
- //                     // first non-blank line
- //                     // count number of leading whitespaces
- //
- //                     indent = 0;
- //                     for ( int j = 0; j < text.length(); j++ ) {
- //                         if ( !Character.isWhitespace(text.charAt(j)) )
- //                             break;
- //
- //                         indent++;
- //                     }
- //                 }
- //
- //                 if ( text.length() >= indent ) {
- //                     int j;
- //                     for ( j = 0; j < indent ; j++ ) {
- //                         if ( !Character.isWhitespace(text.charAt(j)) ) {
- //                             // should do real error reporting here...
- //                             System.err.println("Warning: badly indented line " + lineNo + " in action:");
- //                             System.err.println(text);
- //                             break;
- //                         }
- //                     }
- //
- //                     nChunks.set(i, text.substring(j));
- //                 }
- //                 else if ( text.trim().length() > 0 ) {
- //                     // should do real error reporting here...
- //                     System.err.println("Warning: badly indented line " + lineNo + " in action:");
- //                     System.err.println(text);
- //                 }
- //             }
- //
- //             if ( text.endsWith("\n") ) {
- //                 lineNo++;
- //                 col = 0;
- //             }
- //             else {
- //                 col += text.length();
- //             }
- //         }
- //         else {
- //             // not really correct, but all I need is col to increment...
- //             col += 1;
- //         }
- //     }
- //
- //     return nChunks;
- // }
  }
Binary file
@@ -82,6 +82,8 @@ module Constants
    2 => "<DOWN>".freeze, 3 => "<UP>".freeze,
    -1 => "<EOF>".freeze
  )
+
+
  end
  
  include Constants
@@ -105,14 +105,15 @@ class DFA
              :accept, :special, :transition, :special_block
  
  class << self
-   attr_reader :decision
+   attr_reader :decision, :eot, :eof, :min, :max,
+               :accept, :special, :transition
  
    def unpack(*data)
      data.empty? and return [].freeze
  
      n = data.length / 2
      size = 0
-     n.times { |i| size += data[2*i] }
+     n.times { |i| size += data[ 2*i ] }
      if size > 1024
        values = Hash.new(0)
        data.each_slice(2) do |count, value|
@@ -131,26 +132,28 @@ class DFA
      else
        unpacked = []
        data.each_slice(2) do |count, value|
-         unpacked.concat Array.new(count, value)
+         unpacked.fill( value, unpacked.length, count )
        end
      end
  
-     return unpacked.freeze
+     return unpacked
    end
+
  end
  
- def initialize(recognizer, decision_number = nil, eot = nil, eof = nil,
-                min = nil, max = nil, accept = nil, special = nil,
-                transition = nil, &special_block)
+ def initialize( recognizer, decision_number = nil,
+                 eot = nil, eof = nil, min = nil, max = nil,
+                 accept = nil, special = nil,
+                 transition = nil, &special_block )
    @recognizer = recognizer
    @decision_number = decision_number || self.class.decision
-   @eot = eot || self.class::EOT
-   @eof = eof || self.class::EOF
-   @min = min || self.class::MIN
-   @max = max || self.class::MAX
-   @accept = accept || self.class::ACCEPT
-   @special = special || self.class::SPECIAL
-   @transition = transition || self.class::TRANSITION
+   @eot = eot || self.class::EOT #.eot
+   @eof = eof || self.class::EOF #.eof
+   @min = min || self.class::MIN #.min
+   @max = max || self.class::MAX #.max
+   @accept = accept || self.class::ACCEPT #.accept
+   @special = special || self.class::SPECIAL #.special
+   @transition = transition || self.class::TRANSITION #.transition
    @special_block = special_block
  rescue NameError => e
    raise unless e.message =~ /uninitialized constant/
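
For context on the `unpack` change above (an illustration, not part of the diff): the serialized DFA tables are run-length encoded as `count, value` pairs, and the new code expands each pair in place with `Array#fill` instead of building a temporary array per pair. A tiny sketch with invented data:

    # invented run-length data: three 7s followed by two -1s
    data = [ 3, 7, 2, -1 ]

    unpacked = []
    data.each_slice( 2 ) do |count, value|
      # append `count` copies of `value` at the current end of the array
      unpacked.fill( value, unpacked.length, count )
    end

    unpacked   # => [7, 7, 7, -1, -1]
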
@@ -185,7 +188,7 @@ class DFA
      c = input.peek
      # the @min and @max arrays contain the bounds of the character (or token type)
      # ranges for the transition decisions
-     if c.between?(@min[state], @max[state])
+     if c.between?( @min[ state ], @max[ state ] )
        # c - @min[state] is the position of the character within the range
        # so for a range like ?a..?z, a match of ?a would be 0,
        # ?c would be 2, and ?z would be 25
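
The comments in the hunk above describe the transition lookup: when the current character falls between a state's @min and @max bounds, subtracting the lower bound yields a zero-based column into that state's transition row. A standalone sketch of that arithmetic (the one-state tables here are invented, not taken from a generated recognizer):

    # invented one-state tables covering the range ?a..?z
    min   = [ ?a.ord ]
    max   = [ ?z.ord ]
    state = 0

    c = ?c.ord
    if c.between?( min[ state ], max[ state ] )
      offset = c - min[ state ]   # ?a => 0, ?c => 2, ?z => 25
    end
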
@@ -100,7 +100,7 @@ class RecognitionError < StandardError
  include ANTLR3::Constants
  attr_accessor :input, :index, :line, :column, :symbol, :token, :source_name
  
- def initialize(input = nil)
+ def initialize( input = nil )
    @index = @line = @column = nil
    @approximate_line_info = false
    if @input = input
@@ -116,11 +116,11 @@ class RecognitionError < StandardError
      @line = @input.line
      @column = @input.column
    when AST::TreeNodeStream
-     @symbol = nodes.look
+     @symbol = @input.look
      if @symbol.respond_to?(:line) and @symbol.respond_to?(:column)
        @line, @column = @symbol.line, @symbol.column
      else
-       extract_from_node_stream(@input)
+       extract_from_node_stream( @input )
      end
    else
      @symbol = @input.look
@@ -338,13 +338,13 @@ generated lexer file is run directly from the command line.
  
  =end
  class LexerMain < Main
-   def initialize(lexer_class, options = {})
-     super(options)
+   def initialize( lexer_class, options = {} )
+     super( options )
      @lexer_class = lexer_class
    end
  
-   def recognize(in_stream)
-     lexer = @lexer_class.new(in_stream)
+   def recognize( in_stream )
+     lexer = @lexer_class.new( in_stream )
  
      loop do
        begin
@@ -353,7 +353,7 @@ class LexerMain < Main
        else display_token(token)
        end
      rescue ANTLR3::RecognitionError => error
-       report_error(error)
+       report_error( error )
        break
      end
    end
@@ -365,11 +365,11 @@ class LexerMain < Main
      prefix = '-->'
      suffix = ''
    when ANTLR3::HIDDEN_CHANNEL
-     prefix = '// '
-     suffix = ' (hidden) '
+     prefix = '# '
+     suffix = ' (hidden)'
    else
      prefix = '~~>'
-     suffix = ' (channel %p) ' % token.channel
+     suffix = ' (channel %p)' % token.channel
    end
  
    printf("%s %-15s %-15p @ line %-3i col %-3i%s\n",
@@ -557,5 +557,4 @@ class WalkerMain < Main
      end
    end
  end
-
  end
@@ -26,16 +26,36 @@ module ASTBuilder
    private
  
    def subtree_stream(desc, element = nil)
-     AST::RewriteRuleSubtreeStream.new(@adaptor, desc, element)
+     AST::RewriteRuleSubtreeStream.new( @adaptor, desc, element )
+     #if element.instance_of?( Array )
+     #  AST::RewriteRuleSubtreeStream.new( @adaptor, desc, *element )
+     #elsif element
+     #  AST::RewriteRuleSubtreeStream.new( @adaptor, desc, element )
+     #else
+     #  AST::RewriteRuleSubtreeStream.new( @adaptor, desc )
+     #end
    end
  
    def token_stream(desc, element = nil)
-     AST::RewriteRuleTokenStream.new(@adaptor, desc, element)
+     AST::RewriteRuleTokenStream.new( @adaptor, desc, element )
+     #if element.instance_of?( Array )
+     #  AST::RewriteRuleTokenStream.new( @adaptor, desc, *element )
+     #elsif element
+     #  AST::RewriteRuleTokenStream.new( @adaptor, desc, element )
+     #else
+     #  AST::RewriteRuleTokenStream.new( @adaptor, desc )
+     #end
    end
  
    def node_stream(desc, element = nil)
-     AST::RewriteRuleNodeStream.new(@adaptor, desc, element)
+     AST::RewriteRuleNodeStream.new( @adaptor, desc, element )
+     #if element.instance_of?( Array )
+     #  AST::RewriteRuleNodeStream.new( @adaptor, desc, *element )
+     #elsif element
+     #  AST::RewriteRuleNodeStream.new( @adaptor, desc, element )
+     #else
+     #  AST::RewriteRuleNodeStream.new( @adaptor, desc )
+     #end
    end
-
  end
  end
@@ -119,7 +119,6 @@ class RecognizerSharedState
      self.type = nil
      self.text = nil
    end
-
  end
  end
  
@@ -165,6 +164,7 @@ class BaseRecognizer
    attr_reader :grammar_file_name,
                :antlr_version,
                :antlr_version_string,
+               :library_version_string,
                :grammar_home
  
    attr_accessor :token_scheme, :default_rule
@@ -173,9 +173,10 @@ class BaseRecognizer
    # the code with the name of the grammar file and
    # the current version of ANTLR being used to generate
    # the code
-   def generated_using(grammar_file, version_string)
+   def generated_using( grammar_file, antlr_version, library_version = nil )
      @grammar_file_name = grammar_file.freeze
-     @antlr_version_string = version_string.freeze
+     @antlr_version_string = antlr_version.freeze
+     @library_version = Util.parse_version( library_version )
      if @antlr_version_string =~ /^(\d+)\.(\d+)(?:\.(\d+)(?:b(\d+))?)?(.*)$/
        @antlr_version = [$1, $2, $3, $4].map! { |str| str.to_i }
        timestamp = $5.strip
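
As an aside on `generated_using` above (not part of the diff): the regexp splits an ANTLR version string into up to four numeric parts plus whatever trails them, typically a build timestamp. A quick sketch against an invented sample string:

    version_string = '3.2.1 Sep 23, 2009 12:00:00'   # invented sample

    if version_string =~ /^(\d+)\.(\d+)(?:\.(\d+)(?:b(\d+))?)?(.*)$/
      antlr_version = [ $1, $2, $3, $4 ].map! { |str| str.to_i }   # => [3, 2, 1, 0]
      timestamp     = $5.strip                                     # => "Sep 23, 2009 12:00:00"
    end
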
@@ -742,9 +743,8 @@ class BaseRecognizer
    # input symbol is.
    #
    # This is ignored for lexers.
-
    def current_input_symbol
-     return nil
+     @input.look
    end
  
    # Consume tokens until one matches the given token or token set
@@ -811,7 +811,7 @@ class BaseRecognizer
    end
  
    def trace_in(rule_name, rule_index, input_symbol)
-     @error_output.printf("--> enter %s on %s", rule_name, input_symbol)
+     @error_output.printf( "--> enter %s on %s", rule_name, input_symbol )
      @state.backtracking > 0 and @error_output.printf(
        " (in backtracking mode: depth = %s)", @state.backtracking
      )
@@ -958,6 +958,10 @@ class Lexer < BaseRecognizer
      end
    end
  
+   def current_input_symbol
+     nil
+   end
+
    def next_token
      loop do
        @state.token = nil
@@ -1110,7 +1114,8 @@ class Lexer < BaseRecognizer
      @input.consume
    end
  
- private
+
+ private
  
    def trace_in(rule_name, rule_index)
      if symbol = @input.look and symbol != EOF then symbol = symbol.inspect
@@ -1200,18 +1205,14 @@ class Parser < BaseRecognizer
      end
    end
  
-   def initialize(input, options = {})
-     super(options)
+   def initialize( input, options = {} )
+     super( options )
      @input = nil
      reset
      input = cast_input( input, options ) unless TokenStream === input
      @input = input
    end
  
-   def current_input_symbol
-     @input.look
-   end
-
    def missing_symbol(error, expected_type, follow)
      current = @input.look
      current = @input.look(-1) if current == ANTLR3::EOF_TOKEN
@@ -1239,15 +1240,15 @@ class Parser < BaseRecognizer
    def source_name
      @input.source_name
    end
-
+
    private
  
    def trace_in(rule_name, rule_index)
-     super(rule_name, rule_index, @input.look.inspect)
+     super( rule_name, rule_index, @input.look.inspect )
    end
  
    def trace_out(rule_name, rule_index)
-     super(rule_name, rule_index, @input.look.inspect)
+     super( rule_name, rule_index, @input.look.inspect )
    end
  
    def cast_input( input, options )