antlr4 0.9.2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
 - checksums.yaml +7 -0
 - data/LICENSE +27 -0
 - data/README.md +46 -0
 - data/lib/antlr4.rb +262 -0
 - data/lib/antlr4/BufferedTokenStream.rb +306 -0
 - data/lib/antlr4/CommonTokenFactory.rb +53 -0
 - data/lib/antlr4/CommonTokenStream.rb +56 -0
 - data/lib/antlr4/FileStream.rb +14 -0
 - data/lib/antlr4/InputStream.rb +82 -0
 - data/lib/antlr4/IntervalSet.rb +341 -0
 - data/lib/antlr4/LL1Analyzer.rb +177 -0
 - data/lib/antlr4/Lexer.rb +335 -0
 - data/lib/antlr4/ListTokenSource.rb +140 -0
 - data/lib/antlr4/Parser.rb +562 -0
 - data/lib/antlr4/ParserInterpreter.rb +149 -0
 - data/lib/antlr4/ParserRuleContext.rb +162 -0
 - data/lib/antlr4/PredictionContext.rb +690 -0
 - data/lib/antlr4/Recognizer.rb +162 -0
 - data/lib/antlr4/RuleContext.rb +226 -0
 - data/lib/antlr4/Token.rb +124 -0
 - data/lib/antlr4/TokenFactory.rb +3 -0
 - data/lib/antlr4/TokenSource.rb +4 -0
 - data/lib/antlr4/TokenStream.rb +3 -0
 - data/lib/antlr4/TraceListener.rb +23 -0
 - data/lib/antlr4/atn/ATN.rb +133 -0
 - data/lib/antlr4/atn/ATNConfig.rb +146 -0
 - data/lib/antlr4/atn/ATNConfigSet.rb +215 -0
 - data/lib/antlr4/atn/ATNDeserializationOptions.rb +62 -0
 - data/lib/antlr4/atn/ATNDeserializer.rb +604 -0
 - data/lib/antlr4/atn/ATNSimulator.rb +43 -0
 - data/lib/antlr4/atn/ATNState.rb +253 -0
 - data/lib/antlr4/atn/ATNType.rb +22 -0
 - data/lib/antlr4/atn/LexerATNSimulator.rb +612 -0
 - data/lib/antlr4/atn/LexerAction.rb +311 -0
 - data/lib/antlr4/atn/LexerActionExecutor.rb +134 -0
 - data/lib/antlr4/atn/ParserATNSimulator.rb +1622 -0
 - data/lib/antlr4/atn/PredictionMode.rb +525 -0
 - data/lib/antlr4/atn/SemanticContext.rb +355 -0
 - data/lib/antlr4/atn/Transition.rb +297 -0
 - data/lib/antlr4/base.rb +60 -0
 - data/lib/antlr4/dfa/DFA.rb +128 -0
 - data/lib/antlr4/dfa/DFASerializer.rb +77 -0
 - data/lib/antlr4/dfa/DFAState.rb +133 -0
 - data/lib/antlr4/error.rb +151 -0
 - data/lib/antlr4/error/DiagnosticErrorListener.rb +136 -0
 - data/lib/antlr4/error/ErrorListener.rb +109 -0
 - data/lib/antlr4/error/ErrorStrategy.rb +742 -0
 - data/lib/antlr4/tree/Chunk.rb +31 -0
 - data/lib/antlr4/tree/ParseTreeMatch.rb +105 -0
 - data/lib/antlr4/tree/ParseTreePattern.rb +70 -0
 - data/lib/antlr4/tree/ParseTreePatternMatcher.rb +334 -0
 - data/lib/antlr4/tree/RuleTagToken.rb +39 -0
 - data/lib/antlr4/tree/TokenTagToken.rb +38 -0
 - data/lib/antlr4/tree/Tree.rb +204 -0
 - data/lib/antlr4/tree/Trees.rb +111 -0
 - data/lib/antlr4/version.rb +5 -0
 - data/lib/antlr4/xpath/XPath.rb +354 -0
 - data/lib/double_key_map.rb +78 -0
 - data/lib/java_symbols.rb +24 -0
 - data/lib/uuid.rb +87 -0
 - data/test/test_intervalset.rb +664 -0
 - data/test/test_tree.rb +140 -0
 - data/test/test_uuid.rb +122 -0
 - metadata +109 -0
 
    
checksums.yaml  ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: e8c6c33daacd490242fa9315d0cb2ff929f6997b
+  data.tar.gz: edfe99a049c875a02ad10bd9bd4f610df729513b
+SHA512:
+  metadata.gz: 1751abe7b1fb45ff15f21ee131da1fb7eac7cb53a22dc51bb977b6c8039f29b178aa1edd235d747a01e9267616644285c1cadcba96d096ccd5e0a642df08a809
+  data.tar.gz: 9b33692abff06abd0ddb9ca7a970c7d3d29ecd810a2a303e83ff79214d6e8ca4ad91126bccb5e14768add6271d6f0125fd2eb4336a6ee16b2cdd3e342bdc156d
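Both digest sets cover the two members of the published .gem archive, metadata.gz and data.tar.gz. As a quick sanity check against a downloaded copy, the sketch below recomputes them with Ruby's standard Digest library; it assumes the archive has already been unpacked next to the script (for example with `tar xf antlr4-0.9.2.gem`, which is not part of this diff).

```ruby
require 'digest'

# Recompute the digests recorded in checksums.yaml for the unpacked members.
%w[metadata.gz data.tar.gz].each do |member|
  puts "#{member}:"
  puts "  SHA1:   #{Digest::SHA1.file(member).hexdigest}"
  puts "  SHA512: #{Digest::SHA512.file(member).hexdigest}"
end
```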
    
data/LICENSE  ADDED
@@ -0,0 +1,27 @@
+Copyright (c) 2014, Chad Slaughter <chad.slaughter@gmail.com>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    
data/README.md  ADDED
@@ -0,0 +1,46 @@
+This is the Ruby runtime and target for Antlr4.
+
+This is based on the Python3 & Java runtime & targets.
+It includes the Java code for antlr to use *language = Ruby ;* and
+the runtime gem for the ruby code to run. There is a simplistic
+_bin/antlr-testrig_ for quickly loading a Lexer/Parser pair and
+running them on an input file.
+
+Usage
+-----
+To use the Ruby language codegen for Antlrv4 do the following.
+
+1. Create the jar file.
+    ```% make antlr4-ruby.jar ```
+
+2. Put the jar file in your java class path
+
+3. Use either the _options_ section  or the _-Dlanguage=_ switch
+
+```
+  options {  language = Ruby ; }
+```
+
+```
+java org.antlr.v4.Tool -Dlanguage=Ruby input.g4
+```
+
+Build gem for use by Ruby code. It is placed in _pkg_.
+```
+rake build
+```
+
+You can then install with Bundler or Rubygems directly.
+
+
+Missing Features
+----------------
+* Ruby runtime is not in a proper module and library setup
+* The Ruby test for integration with antlr4 are missing
+* Proper attribution of all code
+
+
+### Fixed Bugs ###
+* Ruby MRI encoding causes fails with some generated ATNs.
+ This was fixed by using \x instead of \u for ATN encoding.
+
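The README stops at building and installing the gem. For orientation, here is a minimal sketch of driving a generated recognizer from Ruby. The grammar name `Hello`, its start rule `greet`, and the generated file names are hypothetical, and the `InputStream`/`CommonTokenStream` constructors are assumed to mirror the Python3 runtime this port is based on; the runtime classes live at the top level, as noted under Missing Features.

```ruby
require 'antlr4'
require_relative 'HelloLexer'    # generated by: java org.antlr.v4.Tool -Dlanguage=Ruby Hello.g4
require_relative 'HelloParser'

# Feed raw text to the lexer, buffer its tokens, and invoke the start rule.
input  = InputStream.new("hello world")
lexer  = HelloLexer.new(input)
tokens = CommonTokenStream.new(lexer)
parser = HelloParser.new(tokens)
tree   = parser.greet            # 'greet' stands in for the grammar's start rule
puts tree.class                  # a generated ParserRuleContext subclass
```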
    
data/lib/antlr4.rb  ADDED
@@ -0,0 +1,262 @@
+require 'double_key_map'
+require 'java_symbols'
+require 'uuid'
+
+require 'stringio'
+require 'set'
+require 'forwardable'
+
+require 'antlr4/base'
+require 'antlr4/version'
+
+require 'antlr4/Token'
+require 'antlr4/InputStream'
+require 'antlr4/FileStream'
+require 'antlr4/TokenStream'
+
+require 'antlr4/error'
+require 'antlr4/error/ErrorListener'
+require 'antlr4/error/DiagnosticErrorListener'
+require 'antlr4/error/ErrorStrategy'
+
+require 'antlr4/BufferedTokenStream'
+require 'antlr4/CommonTokenStream'
+
+require 'antlr4/IntervalSet'
+
+require 'antlr4/tree/Trees'
+require 'antlr4/tree/Tree'
+require 'antlr4/tree/TokenTagToken'
+require 'antlr4/tree/RuleTagToken'
+require 'antlr4/tree/ParseTreeMatch'
+require 'antlr4/tree/ParseTreePatternMatcher'
+#
+require 'antlr4/Recognizer'
+require 'antlr4/TokenSource'
+require 'antlr4/ListTokenSource'
+require 'antlr4/TokenFactory'
+require 'antlr4/CommonTokenFactory'
+
+require 'antlr4/RuleContext'
+require 'antlr4/ParserRuleContext'
+require 'antlr4/PredictionContext'
+
+require 'antlr4/LL1Analyzer'
+
+require 'antlr4/dfa/DFA'
+require 'antlr4/dfa/DFAState'
+require 'antlr4/dfa/DFASerializer'
+
+require 'antlr4/atn/ATNType'
+require 'antlr4/atn/ATNState'
+require 'antlr4/atn/ATN'
+require 'antlr4/atn/ATNConfig'
+require 'antlr4/atn/ATNConfigSet'
+require 'antlr4/atn/Transition'
+require 'antlr4/atn/ATNSimulator'
+require 'antlr4/atn/SemanticContext'
+require 'antlr4/atn/LexerAction'
+require 'antlr4/atn/LexerActionExecutor'
+require 'antlr4/atn/PredictionMode'
+require 'antlr4/atn/ATNDeserializationOptions'
+require 'antlr4/atn/LexerATNSimulator'
+require 'antlr4/atn/ParserATNSimulator'
+require 'antlr4/atn/ATNDeserializer'
+
+require 'antlr4/Parser'
+require 'antlr4/TraceListener'
+require 'antlr4/ParserInterpreter'
+require 'antlr4/Lexer'
+
+
+__END__
+CommonTokenFactory.rb:3:require 'Token'
+CommonTokenFactory.rb:5:require 'TokenFactory'
+
+FileStream.rb:7:require 'InputStream'
+InputStream.rb:6:require 'Token'
+IntervalSet.rb:3:require 'Token'
+IntervalSet.rb:4:require 'set'
+IntervalSet.rb:19:require 'forwardable'
+LL1Analyzer.rb:3:require 'Token'
+LL1Analyzer.rb:11:require 'set'
+Lexer.rb:15:require 'CommonTokenFactory'
+Lexer.rb:16:require 'atn/LexerATNSimulator'
+Lexer.rb:17:require 'InputStream'
+Lexer.rb:18:require 'Recognizer'
+Lexer.rb:19:require 'error'
+Lexer.rb:20:require 'Token'
+Lexer.rb:21:require 'TokenSource'
+Lexer.rb:23:require 'java_symbols'
+Lexer.rb:111:            raise IllegalStateException.new("nextToken requires a non-null input stream.")
+ListTokenSource.rb:13:require 'CommonTokenFactory'
+ListTokenSource.rb:14:require 'TokenSource'
+ListTokenSource.rb:15:require 'Token'
+Parser.rb:1:require 'TokenStream'
+Parser.rb:2:require 'TokenFactory'
+Parser.rb:3:require 'error'
+Parser.rb:4:require 'error/ErrorStrategy'
+Parser.rb:5:require 'InputStream'
+Parser.rb:6:require 'Recognizer'
+Parser.rb:7:require 'RuleContext'
+Parser.rb:8:require 'ParserRuleContext'
+Parser.rb:9:require 'Token'
+Parser.rb:10:require 'Lexer'
+Parser.rb:11:require 'tree/ParseTreePatternMatcher'
+Parser.rb:12:require 'tree/Tree'
+Parser.rb:19:require 'java_symbols'
+ParserInterpreter.rb:27:require 'TokenStream'
+ParserInterpreter.rb:28:require 'Parser'
+ParserInterpreter.rb:29:require 'ParserRuleContext'
+ParserInterpreter.rb:30:require 'Token'
+ParserInterpreter.rb:31:require 'error'
+ParserInterpreter.rb:33:require 'set'
+ParserRuleContext.rb:24:require 'RuleContext'
+ParserRuleContext.rb:25:require 'Token'
+ParserRuleContext.rb:26:require 'tree/Tree'
+PredictionContext.rb:6:require 'RuleContext'
+PredictionContext.rb:7:require 'double_key_map'
+Recognizer.rb:4:require 'RuleContext'
+Recognizer.rb:5:require 'Token'
+Recognizer.rb:6:require 'error/ErrorListener'
+Recognizer.rb:7:require 'error'
+RuleContext.rb:22:require 'stringio'
+RuleContext.rb:25:require 'tree/Tree'
+RuleContext.rb:26:require 'tree/Trees'
+TokenSource.rb:2:require 'Recognizer'
+atn/ATN.rb:1:require 'IntervalSet'
+atn/ATN.rb:2:require 'RuleContext'
+atn/ATN.rb:4:require 'Token'
+atn/ATN.rb:5:require 'atn/ATNType'
+atn/ATN.rb:6:require 'atn/ATNState'
+atn/ATN.rb:8:require 'java_symbols'
+atn/ATN.rb:50:        require 'LL1Analyzer'
+atn/ATNConfig.rb:13:require 'PredictionContext'
+atn/ATNConfig.rb:14:require 'atn/ATNState'
+atn/ATNConfig.rb:15:#require 'atn/LexerActionExecutor'
+atn/ATNConfig.rb:16:#require 'atn/SemanticContext'
+atn/ATNConfigSet.rb:5:require 'stringio'
+atn/ATNConfigSet.rb:6:require 'PredictionContext'
+atn/ATNConfigSet.rb:7:require 'atn/ATN'
+atn/ATNConfigSet.rb:8:require 'atn/ATNConfig'
+atn/ATNConfigSet.rb:9:#require 'atn/SemanticContext'
+atn/ATNConfigSet.rb:10:require 'error'
+atn/ATNConfigSet.rb:12:require 'forwardable'
+atn/ATNDeserializer.rb:2:require 'stringio'
+atn/ATNDeserializer.rb:3:require 'Token'
+atn/ATNDeserializer.rb:4:require 'atn/ATN'
+atn/ATNDeserializer.rb:5:require 'atn/ATNType'
+atn/ATNDeserializer.rb:6:require 'atn/ATNState'
+atn/ATNDeserializer.rb:7:require 'atn/Transition'
+atn/ATNDeserializer.rb:8:require 'atn/LexerAction'
+atn/ATNDeserializer.rb:9:require 'atn/ATNDeserializationOptions'
+atn/ATNDeserializer.rb:11:require 'uuid'
+atn/ATNSimulator.rb:6:require 'PredictionContext'
+atn/ATNSimulator.rb:7:require 'atn/ATN'
+atn/ATNSimulator.rb:8:require 'atn/ATNConfigSet'
+atn/ATNSimulator.rb:9:require 'dfa/DFAState'
+atn/ATNSimulator.rb:31:    #  For the Java grammar on java.*, it dropped the memory requirements
+atn/LexerATNSimulator.rb:16:require 'Lexer'
+atn/LexerATNSimulator.rb:17:require 'PredictionContext'
+atn/LexerATNSimulator.rb:18:require 'InputStream'
+atn/LexerATNSimulator.rb:19:require 'Token'
+atn/LexerATNSimulator.rb:20:require 'atn/ATN'
+atn/LexerATNSimulator.rb:21:require 'atn/ATNConfig'
+atn/LexerATNSimulator.rb:22:require 'atn/ATNSimulator'
+atn/LexerATNSimulator.rb:23:require 'atn/ATNConfigSet'
+atn/LexerATNSimulator.rb:24:require 'atn/ATNState'
+atn/LexerATNSimulator.rb:25:require 'atn/LexerActionExecutor'
+atn/LexerATNSimulator.rb:26:require 'atn/Transition'
+atn/LexerATNSimulator.rb:27:require 'dfa/DFAState'
+atn/LexerATNSimulator.rb:28:require 'error'
+atn/LexerATNSimulator.rb:30:require 'java_symbols'
+atn/LexerAction.rb:4:require 'java_symbols'
+atn/LexerAction.rb:273:# <p>This action is not serialized as part of the ATN, and is only required for
+atn/LexerAction.rb:283:    # <p>Note: This class is only required for lexer actions for which
+atn/LexerActionExecutor.rb:8:require 'InputStream'
+atn/LexerActionExecutor.rb:10:require 'atn/LexerAction'
+atn/LexerActionExecutor.rb:112:        requiresSeek = false
+atn/LexerActionExecutor.rb:120:                    requiresSeek = (startIndex + offset) != stopIndex
+atn/LexerActionExecutor.rb:123:                    requiresSeek = false
+atn/LexerActionExecutor.rb:128:            input.seek(stopIndex) if requiresSeek
+atn/ParserATNSimulator.rb:49:# than interpreting and much more complicated. Also required a huge amount of
+atn/ParserATNSimulator.rb:225:# both SLL and LL parsing. Erroneous input will therefore require 2 passes over
+atn/ParserATNSimulator.rb:228:require 'dfa/DFA'
+atn/ParserATNSimulator.rb:229:require 'PredictionContext'
+atn/ParserATNSimulator.rb:230:require 'TokenStream'
+atn/ParserATNSimulator.rb:231:require 'Parser'
+atn/ParserATNSimulator.rb:232:require 'ParserRuleContext'
+atn/ParserATNSimulator.rb:233:require 'RuleContext'
+atn/ParserATNSimulator.rb:234:require 'Token'
+atn/ParserATNSimulator.rb:235:require 'atn/ATN'
+atn/ParserATNSimulator.rb:236:require 'atn/ATNConfig'
+atn/ParserATNSimulator.rb:237:require 'atn/ATNConfigSet'
+atn/ParserATNSimulator.rb:238:require 'atn/ATNSimulator'
+atn/ParserATNSimulator.rb:239:require 'atn/ATNState'
+atn/ParserATNSimulator.rb:240:require 'atn/PredictionMode'
+atn/ParserATNSimulator.rb:241:require 'atn/SemanticContext'
+atn/ParserATNSimulator.rb:242:require 'atn/Transition'
+atn/ParserATNSimulator.rb:243:require 'dfa/DFAState'
+atn/ParserATNSimulator.rb:244:require 'error'
+atn/ParserATNSimulator.rb:442:            if cD.requiresFullContext and self.predictionMode != PredictionMode.SLL
+atn/ParserATNSimulator.rb:560:            cD.requiresFullContext = true
+atn/ParserATNSimulator.rb:716:        # For full-context reach operations, separate handling is required to
+atn/ParserATNSimulator.rb:799:            # required.
+atn/PredictionMode.rb:11:require 'atn/ATN'
+atn/PredictionMode.rb:12:require 'atn/ATNConfig'
+atn/PredictionMode.rb:13:require 'atn/ATNConfigSet'
+atn/PredictionMode.rb:14:require 'atn/ATNState'
+atn/PredictionMode.rb:15:require 'atn/SemanticContext'
+atn/PredictionMode.rb:16:require 'java_symbols'
+atn/PredictionMode.rb:34:    # that the particular combination of grammar and input requires the more
+atn/PredictionMode.rb:287:    # <p>No special consideration for semantic predicates is required because
+atn/SemanticContext.rb:11:require 'Recognizer'
+atn/SemanticContext.rb:12:require 'RuleContext'
+atn/Transition.rb:15:require 'IntervalSet'
+atn/Transition.rb:16:require 'Token'
+atn/Transition.rb:18:require 'atn/SemanticContext'
+atn/Transition.rb:20:require 'java_symbols'
+base.rb:1:require 'Token'
+base.rb:2:require 'FileStream'
+base.rb:3:require 'TokenStream'
+base.rb:4:require 'BufferedTokenStream'
+base.rb:5:require 'CommonTokenStream'
+base.rb:6:require 'Lexer'
+base.rb:7:require 'Parser'
+base.rb:8:require 'dfa/DFA'
+base.rb:9:require 'atn/ATN'
+base.rb:10:require 'atn/ATNDeserializer'
+base.rb:11:require 'atn/LexerATNSimulator'
+base.rb:12:require 'atn/ParserATNSimulator'
+base.rb:13:require 'atn/PredictionMode'
+base.rb:14:require 'PredictionContext'
+base.rb:15:require 'ParserRuleContext'
+base.rb:16:require 'tree/Tree' # import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
+base.rb:17:require 'error'    #  Errors import RecognitionException, IllegalStateException, NoViableAltException
+base.rb:18:require 'error/ErrorStrategy' # import BailErrorStrategy
+base.rb:19:require 'error/DiagnosticErrorListener' # import DiagnosticErrorListener
+base.rb:21:require 'java_symbols'
+dfa/DFA.rb:9:require 'dfa/DFASerializer'
+dfa/DFA.rb:102:                precedenceState.requiresFullContext = false
+dfa/DFASerializer.rb:3:require 'stringio'
+dfa/DFASerializer.rb:58:        s_requireContext = nil
+dfa/DFASerializer.rb:59:        s_requireContext = "^" if s.requiresFullContext
+dfa/DFASerializer.rb:60:        baseStateStr = "s#{s.stateNumber}#{s_requireContext}"
+dfa/DFAState.rb:2:require 'stringio'
+dfa/DFAState.rb:45:    attr_accessor :lexerActionExecutor, :requiresFullContext, :predicates
+dfa/DFAState.rb:55:        #  {@link #requiresFullContext}.
+dfa/DFAState.rb:62:        self.requiresFullContext = false
+dfa/DFAState.rb:65:        #  {@link #requiresFullContext} is {@code false} since full context prediction evaluates predicates
+dfa/DFAState.rb:69:        #  <p>We only use these for non-{@link #requiresFullContext} but conflicting states. That
+error/DiagnosticErrorListener.rb:52:require 'stringio'
+error/DiagnosticErrorListener.rb:53:require 'set'
+error/DiagnosticErrorListener.rb:56:require 'error/ErrorListener'
+error/ErrorStrategy.rb:32:# require 'IntervalSet' #from antlr4.IntervalSet import IntervalSet
+error/ErrorStrategy.rb:34:#require 'antlr4/Token' #from antlr4.Token import Token
+error/ErrorStrategy.rb:35:#require 'atn.ATNState' #from antlr4.atn.ATNState import ATNState
+error/ErrorStrategy.rb:323:    # This method is called to report a syntax error which requires the removal
+error/ErrorStrategy.rb:350:    # This method is called to report a syntax error which requires the
+error/ErrorStrategy.rb:640:    #  return normally.  Rule b would not find the required '^' though.
+xpath/XPath.rb:59:require 'TokenStream'
+xpath/XPath.rb:60:require 'CommonTokenStream'
+xpath/XPath.rb:61:require 'java_symbols'
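The long list after `__END__` is inert: Ruby stops parsing a file at the `__END__` marker, so this grep-style inventory of each runtime file's own requires ships inside antlr4.rb as a reference note without ever being executed. A standalone illustration of that language behaviour (scratch.rb is a hypothetical file, not part of the gem):

```ruby
# scratch.rb
puts "this line runs"

__END__
require 'this_is_never_loaded'   # never parsed or executed
Anything after __END__ is plain text; the main program file can read it back
through the DATA constant, while required files simply ignore it.
```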
         @@ -0,0 +1,306 @@ 
     | 
|
| 
      
 1 
     | 
    
         
            +
             
     | 
| 
      
 2 
     | 
    
         
            +
            class BufferedTokenStream < TokenStream
         
     | 
| 
      
 3 
     | 
    
         
            +
             
     | 
| 
      
 4 
     | 
    
         
            +
                attr_accessor :tokenSource, :tokens, :index,:fetchedEOF 
         
     | 
| 
      
 5 
     | 
    
         
            +
                def initialize(_tokenSource)
         
     | 
| 
      
 6 
     | 
    
         
            +
                    # The {@link TokenSource} from which tokens for this stream are fetched.
         
     | 
| 
      
 7 
     | 
    
         
            +
                    @tokenSource = _tokenSource
         
     | 
| 
      
 8 
     | 
    
         
            +
                    # A collection of all tokens fetched from the token source. The list is
         
     | 
| 
      
 9 
     | 
    
         
            +
                    # considered a complete view of the input once {@link #fetchedEOF} is set
         
     | 
| 
      
 10 
     | 
    
         
            +
                    # to {@code true}.
         
     | 
| 
      
 11 
     | 
    
         
            +
                    self.tokens = Array.new
         
     | 
| 
      
 12 
     | 
    
         
            +
             
     | 
| 
      
 13 
     | 
    
         
            +
                    # The index into {@link #tokens} of the current token (next token to
         
     | 
| 
      
 14 
     | 
    
         
            +
                    # {@link #consume}). {@link #tokens}{@code [}{@link #p}{@code ]} should be
         
     | 
| 
      
 15 
     | 
    
         
            +
                    # {@link #LT LT(1)}.
         
     | 
| 
      
 16 
     | 
    
         
            +
                    #
         
     | 
| 
      
 17 
     | 
    
         
            +
                    # <p>This field is set to -1 when the stream is first constructed or when
         
     | 
| 
      
 18 
     | 
    
         
            +
                    # {@link #setTokenSource} is called, indicating that the first token has
         
     | 
| 
      
 19 
     | 
    
         
            +
                    # not yet been fetched from the token source. For additional information,
         
     | 
| 
      
 20 
     | 
    
         
            +
                    # see the documentation of {@link IntStream} for a description of
         
     | 
| 
      
 21 
     | 
    
         
            +
                    # Initializing Methods.</p>
         
     | 
| 
      
 22 
     | 
    
         
            +
                    self.index = -1
         
     | 
| 
      
 23 
     | 
    
         
            +
             
     | 
| 
      
 24 
     | 
    
         
            +
                    # Indicates whether the {@link Token#EOF} token has been fetched from
         
     | 
| 
      
 25 
     | 
    
         
            +
                    # {@link #tokenSource} and added to {@link #tokens}. This field improves
         
     | 
| 
      
 26 
     | 
    
         
            +
                    # performance for the following cases
         
     | 
| 
      
 27 
     | 
    
         
            +
                    #
         
     | 
| 
      
 28 
     | 
    
         
            +
                    # <ul>
         
     | 
| 
      
 29 
     | 
    
         
            +
                    # <li>{@link #consume}: The lookahead check in {@link #consume} to prevent
         
     | 
| 
      
 30 
     | 
    
         
            +
                    # consuming the EOF symbol is optimized by checking the values of
         
     | 
| 
      
 31 
     | 
    
         
            +
                    # {@link #fetchedEOF} and {@link #p} instead of calling {@link #LA}.</li>
         
     | 
| 
      
 32 
     | 
    
         
            +
                    # <li>{@link #fetch}: The check to prevent adding multiple EOF symbols into
         
     | 
| 
      
 33 
     | 
    
         
            +
                    # {@link #tokens} is trivial with this field.</li>
         
     | 
| 
      
 34 
     | 
    
         
            +
                    # <ul>
         
     | 
| 
      
 35 
     | 
    
         
            +
                    self.fetchedEOF = false
         
     | 
| 
      
 36 
     | 
    
         
            +
                end
         
     | 
| 
      
 37 
     | 
    
         
            +
             
     | 
| 
      
 38 
     | 
    
         
            +
                def mark
         
     | 
| 
      
 39 
     | 
    
         
            +
                    return 0
         
     | 
| 
      
 40 
     | 
    
         
            +
                end
         
     | 
| 
      
 41 
     | 
    
         
            +
                
         
     | 
| 
      
 42 
     | 
    
         
            +
                def release(marker)
         
     | 
| 
      
 43 
     | 
    
         
            +
                    # no resources to release
         
     | 
| 
      
 44 
     | 
    
         
            +
                end
         
     | 
| 
      
 45 
     | 
    
         
            +
                
         
     | 
| 
      
 46 
     | 
    
         
            +
                def reset()
         
     | 
| 
      
 47 
     | 
    
         
            +
                    self.seek(0)
         
     | 
| 
      
 48 
     | 
    
         
            +
                end
         
     | 
| 
      
 49 
     | 
    
         
            +
                def seek( index)
         
     | 
| 
      
 50 
     | 
    
         
            +
                    self.lazyInit()
         
     | 
| 
      
 51 
     | 
    
         
            +
                    self.index = self.adjustSeekIndex(index)
         
     | 
| 
      
 52 
     | 
    
         
            +
                end
         
     | 
| 
      
 53 
     | 
    
         
            +
                def get(index)
         
     | 
| 
      
 54 
     | 
    
         
            +
                    self.lazyInit()
         
     | 
| 
      
 55 
     | 
    
         
            +
                    return self.tokens[index]
         
     | 
| 
      
 56 
     | 
    
         
            +
                end
         
     | 
| 
      
 57 
     | 
    
         
            +
                def consume()
         
     | 
| 
      
 58 
     | 
    
         
            +
                    skipEofCheck = false
         
     | 
| 
      
 59 
     | 
    
         
            +
                    if self.index >= 0 then
         
     | 
| 
      
 60 
     | 
    
         
            +
                        if self.fetchedEOF then
         
     | 
| 
      
 61 
     | 
    
         
            +
                            # the last token in tokens is EOF. skip check if p indexes any
         
     | 
| 
      
 62 
     | 
    
         
            +
                            # fetched token except the last.
         
     | 
| 
      
 63 
     | 
    
         
            +
                            skipEofCheck = self.index < self.tokens.length - 1
         
     | 
| 
      
 64 
     | 
    
         
            +
                        else
         
     | 
| 
      
 65 
     | 
    
         
            +
                           # no EOF token in tokens. skip check if p indexes a fetched token.
         
     | 
| 
      
 66 
     | 
    
         
            +
                            skipEofCheck = self.index < self.tokens.length
         
     | 
| 
      
 67 
     | 
    
         
            +
                        end
         
     | 
| 
      
 68 
     | 
    
         
            +
                    else
         
     | 
| 
      
 69 
     | 
    
         
            +
                        # not yet initialized
         
     | 
| 
      
 70 
     | 
    
         
            +
                        skipEofCheck = false
         
     | 
| 
      
 71 
     | 
    
         
            +
                    end
         
     | 
| 
      
 72 
     | 
    
         
            +
                    if not skipEofCheck and self.LA(1) == Token::EOF then
         
     | 
| 
      
 73 
     | 
    
         
            +
                        raise IllegalStateException.new("cannot consume EOF")
         
     | 
| 
      
 74 
     | 
    
         
            +
                    end
         
     | 
| 
      
 75 
     | 
    
         
            +
                    if self.sync(self.index + 1) then
         
     | 
| 
      
 76 
     | 
    
         
            +
                        self.index = self.adjustSeekIndex(self.index + 1)
         
     | 
| 
      
 77 
     | 
    
         
            +
                    end
         
     | 
| 
      
 78 
     | 
    
         
            +
                end
         
     | 
| 
      
 79 
     | 
    
         
            +
                # Make sure index {@code i} in tokens has a token.
         
     | 
| 
      
 80 
     | 
    
         
            +
                #
         
     | 
| 
      
 81 
     | 
    
         
            +
                # @return {@code true} if a token is located at index {@code i}, otherwise
         
     | 
| 
      
 82 
     | 
    
         
            +
                #    {@code false}.
         
     | 
| 
      
 83 
     | 
    
         
            +
                # @see #get(int i)
         
     | 
| 
      
 84 
     | 
    
         
            +
                #/
         
     | 
| 
      
 85 
     | 
    
         
            +
                def sync(i)
         
     | 
| 
      
 86 
     | 
    
         
            +
                    #assert i >= 0
         
     | 
| 
      
 87 
     | 
    
         
            +
                    n = i - self.tokens.length + 1 # how many more elements we need?
         
     | 
| 
      
 88 
     | 
    
         
            +
                    if n > 0 then
         
     | 
| 
      
 89 
     | 
    
         
            +
                        fetched = self.fetch(n)
         
     | 
| 
      
 90 
     | 
    
         
            +
                        return fetched >= n
         
     | 
| 
      
 91 
     | 
    
         
            +
                    end
         
     | 
| 
      
 92 
     | 
    
         
            +
                    return true
         
     | 
| 
      
 93 
     | 
    
         
            +
                end
         
    # Add {@code n} elements to buffer.
    #
    # @return The actual number of elements added to the buffer.
    #/
    def fetch(n)
        return 0 if self.fetchedEOF
        1.upto(n) do |i| # for i in 0..n-1 do
            t = self.tokenSource.nextToken()
            t.tokenIndex = self.tokens.length
            self.tokens.push(t)
            if t.type==Token::EOF then
                self.fetchedEOF = true
                return i  #  i + 1
            end
        end
        return n
    end
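
    # Illustrative note (not part of the original source): fetch is the only
    # place tokens are pulled from the lexer; everything else reads from the
    # buffer. A sketch, assuming a fresh stream over input that lexes to two
    # tokens plus EOF:
    #
    #   stream.fetch(2)   # => 2, buffer now holds the two real tokens
    #   stream.fetch(10)  # => 1, adds only the EOF token and sets fetchedEOF
    #   stream.fetch(1)   # => 0, nothing left to add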
         
    # Get all tokens from start..stop inclusively#/
    def getTokens(start, stop, types=nil)
        if start<0 or stop<0 then
            return nil
        end
        self.lazyInit()
        subset = Array.new
        if stop >= self.tokens.length
            stop = self.tokens.length-1
        end
        for i in start..stop do
            t = self.tokens[i]
            if t.type==Token::EOF
                break
            end
            if (types.nil? or types.member?(t.type)) then
                subset.push(t)
            end
        end
        return subset
    end
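
    # Illustrative usage (not part of the original source); the token type
    # constants come from a generated lexer, so the names are hypothetical.
    # `types` may be anything that answers member?, e.g. a Set or an Array.
    #
    #   stream.getTokens(0, 20)                          # every token in 0..20
    #   stream.getTokens(0, 20, Set.new([MyLexer::ID]))  # only ID tokens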
         
    # Return the type of the token {@code i} positions ahead (see LT).
    def LA(i)
        return self.LT(i).type
    end
    # Look back {@code k} tokens from the current index; nil if out of range.
    def LB(k)
        return nil if (self.index-k) < 0
        return self.tokens[self.index-k]
    end
    # Return the token {@code k} positions ahead of the current index
    # (k=1 is the current token); negative k delegates to LB.
    def LT(k)
        self.lazyInit()
        return nil if k==0
        return self.LB(-k) if k < 0
        i = self.index + k - 1
        self.sync(i)
        if i >= self.tokens.length then # return EOF token
            # EOF must be last token
            return self.tokens[self.tokens.length-1]
        end
        return self.tokens[i]
    end
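
    # Illustrative usage (not part of the original source), assuming the
    # stream has been initialized and is positioned somewhere in the buffer:
    #
    #   stream.LT(1)    # current token (the one consume() would move past)
    #   stream.LT(2)    # the token after that
    #   stream.LT(-1)   # most recently consumed token, or nil at the start
    #   stream.LA(1)    # type of the current token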
         
    # Allows derived classes to modify the behavior of operations which change
    # the current stream position by adjusting the target token index of a seek
    # operation. The default implementation simply returns {@code i}. If an
    # exception is thrown in this method, the current stream index should not be
    # changed.
    #
    # <p>For example, {@link CommonTokenStream} overrides this method to ensure that
    # the seek target is always an on-channel token.</p>
    #
    # @param i The target token index.
    # @return The adjusted target token index.

    def adjustSeekIndex(i)
        return i
    end
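
    # Illustrative sketch (not part of the original source): a subclass that
    # filters by channel could override adjustSeekIndex to land on the next
    # on-channel token, which is roughly what CommonTokenStream does:
    #
    #   def adjustSeekIndex(i)
    #       return self.nextTokenOnChannel(i, self.channel)
    #   end
    #
    # Here self.channel is assumed to be defined by the subclass.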
         
    def lazyInit
        # Initialize on first access: buffer the first token and seek to index 0.
        if self.index == -1 then
            self.setup()
        end
    end

    def setup()
        self.sync(0)
        self.index = self.adjustSeekIndex(0)
    end

    # Reset this token stream by setting its token source.#/
    def setTokenSource(tokenSource)
        self.tokenSource = tokenSource
        self.tokens = []
        self.index = -1
    end
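
    # Illustrative usage (not part of the original source): point an existing
    # stream at a fresh lexer and re-tokenize. MyLexer is a hypothetical
    # ANTLR-generated lexer wrapping an InputStream over the new text:
    #
    #   stream.setTokenSource(MyLexer.new(InputStream.new("new input")))
    #   stream.fill()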
         
    # Given a starting index, return the index of the next token on channel.
    #  Return i if tokens[i] is on channel.  Return -1 if there are no tokens
    #  on channel between i and EOF.
    #/
    def nextTokenOnChannel(i, channel)
        self.sync(i)
        return -1 if i>=self.tokens.length
        token = self.tokens[i]
        while token.channel!=channel do
            return -1 if token.type==Token::EOF
            i = i + 1
            self.sync(i)
            token = self.tokens[i]
        end
        return i
    end
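
    # Illustrative note (not part of the original source): assuming whitespace
    # and comments are lexed onto a hidden channel, calling
    # nextTokenOnChannel(i, Lexer::DEFAULT_TOKEN_CHANNEL) walks forward from i
    # to the first index holding a default-channel token (the EOF token
    # included), growing the buffer via sync as it goes.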
         
    # Given a starting index, return the index of the previous token on channel.
    #  Return i if tokens[i] is on channel. Return -1 if there are no tokens
    #  on channel between i and 0.
    def previousTokenOnChannel(i, channel)
        while i>=0 and self.tokens[i].channel!=channel do
            i = i - 1
        end
        return i
    end
         
    # Collect all tokens on specified channel to the right of
    #  the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
    #  EOF. If channel is -1, find any non default channel token.
    def getHiddenTokensToRight(tokenIndex, channel=-1)
        self.lazyInit()
        if tokenIndex<0 or tokenIndex>=self.tokens.length then
            raise Exception.new("#{tokenIndex} not in 0..#{self.tokens.length-1}")
        end
        nextOnChannel = self.nextTokenOnChannel(tokenIndex + 1, Lexer::DEFAULT_TOKEN_CHANNEL)
        from_ = tokenIndex+1
        # if none on channel to right, nextOnChannel=-1, so collect up to the last token
        if nextOnChannel==-1
            to = self.tokens.length-1
        else
            to = nextOnChannel
        end
        return self.filterForChannel(from_, to, channel)
    end
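
    # Illustrative usage (not part of the original source): recover a trailing
    # comment that the grammar routed to a hidden channel. `idx` is the index
    # of the token the comment follows, and Lexer::HIDDEN stands in for the
    # hidden-channel constant:
    #
    #   hidden = stream.getHiddenTokensToRight(idx, Lexer::HIDDEN)
    #   hidden.map { |t| t.text } unless hidden.nil?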
         
    # Collect all tokens on specified channel to the left of
    #  the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
    #  If channel is -1, find any non default channel token.
    def getHiddenTokensToLeft(tokenIndex, channel=-1)
        self.lazyInit()
        if tokenIndex<0 or tokenIndex>=self.tokens.length
            raise Exception.new("#{tokenIndex} not in 0..#{self.tokens.length-1}")
        end
        prevOnChannel = self.previousTokenOnChannel(tokenIndex - 1, Lexer::DEFAULT_TOKEN_CHANNEL)
        return nil if prevOnChannel == tokenIndex - 1

        # if none on channel to left, prevOnChannel=-1 then from=0
        from_ = prevOnChannel+1
        to = tokenIndex-1
        return self.filterForChannel(from_, to, channel)
    end
         
    def filterForChannel(left, right, channel)
        hidden = []
        for i in left..right do
            t = self.tokens[i]
            if channel==-1 then
                if t.channel != Lexer::DEFAULT_TOKEN_CHANNEL
                    hidden.push(t)
                end
            elsif t.channel==channel then
                hidden.push(t)
            end
        end
        return nil if hidden.length==0
        return hidden
    end
         
    def getSourceName
        return self.tokenSource.getSourceName()
    end
         
    # Get the text of all tokens in this buffer.#/
    def getText(interval=nil)
        self.lazyInit()
        self.fill()
        if interval.nil?
            interval = [0, self.tokens.length-1]
        end
        start = interval[0]
        if start.kind_of? Token
            start = start.tokenIndex
        end
        stop = interval[1]
        if stop.kind_of? Token
            stop = stop.tokenIndex
        end
        if start.nil? or stop.nil? or start<0 or stop<0
            return ""
        end
        if stop >= self.tokens.length
            stop = self.tokens.length-1
        end
        StringIO.open do |buf|
            for i in start..stop do
                t = self.tokens[i]
                break if t.type==Token::EOF
                buf.write(t.text)
            end
            return buf.string()
        end
    end
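
    # Illustrative usage (not part of the original source): the interval can be
    # a pair of indexes or a pair of Token objects, e.g. the start/stop tokens
    # of a parser rule context (`ctx` is hypothetical here):
    #
    #   stream.getText()                       # text of the whole buffer
    #   stream.getText([0, 5])                 # tokens 0..5
    #   stream.getText([ctx.start, ctx.stop])  # text spanned by a rule context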
         
    # Get all tokens from lexer until EOF#/
    def fill
        self.lazyInit()
        while fetch(1000)==1000 do
            nil
        end
    end
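
    # Illustrative note (not part of the original source): fill buffers the
    # entire token stream in batches of 1000; once fetch returns fewer than
    # 1000 tokens the lexer has hit EOF. Afterwards every token is available:
    #
    #   stream.fill()
    #   stream.tokens.length                           # buffered count, EOF included
    #   stream.getTokens(0, stream.tokens.length - 1)  # all buffered tokens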
         
end