omlish 0.0.0.dev57__py3-none-any.whl → 0.0.0.dev58__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- omlish/__about__.py +2 -2
- omlish/antlr/__init__.py +0 -0
- omlish/antlr/_runtime/BufferedTokenStream.py +305 -0
- omlish/antlr/_runtime/CommonTokenFactory.py +64 -0
- omlish/antlr/_runtime/CommonTokenStream.py +90 -0
- omlish/antlr/_runtime/FileStream.py +30 -0
- omlish/antlr/_runtime/InputStream.py +90 -0
- omlish/antlr/_runtime/IntervalSet.py +183 -0
- omlish/antlr/_runtime/LL1Analyzer.py +176 -0
- omlish/antlr/_runtime/Lexer.py +332 -0
- omlish/antlr/_runtime/ListTokenSource.py +147 -0
- omlish/antlr/_runtime/Parser.py +583 -0
- omlish/antlr/_runtime/ParserInterpreter.py +173 -0
- omlish/antlr/_runtime/ParserRuleContext.py +189 -0
- omlish/antlr/_runtime/PredictionContext.py +632 -0
- omlish/antlr/_runtime/Recognizer.py +150 -0
- omlish/antlr/_runtime/RuleContext.py +230 -0
- omlish/antlr/_runtime/StdinStream.py +14 -0
- omlish/antlr/_runtime/Token.py +158 -0
- omlish/antlr/_runtime/TokenStreamRewriter.py +258 -0
- omlish/antlr/_runtime/Utils.py +36 -0
- omlish/antlr/_runtime/__init__.py +24 -0
- omlish/antlr/_runtime/_pygrun.py +174 -0
- omlish/antlr/_runtime/atn/ATN.py +135 -0
- omlish/antlr/_runtime/atn/ATNConfig.py +162 -0
- omlish/antlr/_runtime/atn/ATNConfigSet.py +215 -0
- omlish/antlr/_runtime/atn/ATNDeserializationOptions.py +27 -0
- omlish/antlr/_runtime/atn/ATNDeserializer.py +449 -0
- omlish/antlr/_runtime/atn/ATNSimulator.py +50 -0
- omlish/antlr/_runtime/atn/ATNState.py +267 -0
- omlish/antlr/_runtime/atn/ATNType.py +20 -0
- omlish/antlr/_runtime/atn/LexerATNSimulator.py +573 -0
- omlish/antlr/_runtime/atn/LexerAction.py +301 -0
- omlish/antlr/_runtime/atn/LexerActionExecutor.py +146 -0
- omlish/antlr/_runtime/atn/ParserATNSimulator.py +1664 -0
- omlish/antlr/_runtime/atn/PredictionMode.py +502 -0
- omlish/antlr/_runtime/atn/SemanticContext.py +333 -0
- omlish/antlr/_runtime/atn/Transition.py +271 -0
- omlish/antlr/_runtime/atn/__init__.py +4 -0
- omlish/antlr/_runtime/dfa/DFA.py +136 -0
- omlish/antlr/_runtime/dfa/DFASerializer.py +76 -0
- omlish/antlr/_runtime/dfa/DFAState.py +129 -0
- omlish/antlr/_runtime/dfa/__init__.py +4 -0
- omlish/antlr/_runtime/error/DiagnosticErrorListener.py +110 -0
- omlish/antlr/_runtime/error/ErrorListener.py +75 -0
- omlish/antlr/_runtime/error/ErrorStrategy.py +712 -0
- omlish/antlr/_runtime/error/Errors.py +176 -0
- omlish/antlr/_runtime/error/__init__.py +4 -0
- omlish/antlr/_runtime/tree/Chunk.py +33 -0
- omlish/antlr/_runtime/tree/ParseTreeMatch.py +121 -0
- omlish/antlr/_runtime/tree/ParseTreePattern.py +75 -0
- omlish/antlr/_runtime/tree/ParseTreePatternMatcher.py +377 -0
- omlish/antlr/_runtime/tree/RuleTagToken.py +53 -0
- omlish/antlr/_runtime/tree/TokenTagToken.py +50 -0
- omlish/antlr/_runtime/tree/Tree.py +194 -0
- omlish/antlr/_runtime/tree/Trees.py +114 -0
- omlish/antlr/_runtime/tree/__init__.py +2 -0
- omlish/antlr/_runtime/xpath/XPath.py +272 -0
- omlish/antlr/_runtime/xpath/XPathLexer.py +98 -0
- omlish/antlr/_runtime/xpath/__init__.py +4 -0
- {omlish-0.0.0.dev57.dist-info → omlish-0.0.0.dev58.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev57.dist-info → omlish-0.0.0.dev58.dist-info}/RECORD +66 -7
- {omlish-0.0.0.dev57.dist-info → omlish-0.0.0.dev58.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev57.dist-info → omlish-0.0.0.dev58.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev57.dist-info → omlish-0.0.0.dev58.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev57.dist-info → omlish-0.0.0.dev58.dist-info}/top_level.txt +0 -0
omlish/antlr/_runtime/ListTokenSource.py
@@ -0,0 +1,147 @@
+# type: ignore
+# ruff: noqa
+# flake8: noqa
+#
+# Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
+# Use of this file is governed by the BSD 3-clause license that
+# can be found in the LICENSE.txt file in the project root.
+#
+
+#
+# Provides an implementation of {@link TokenSource} as a wrapper around a list
+# of {@link Token} objects.
+#
+# <p>If the final token in the list is an {@link Token#EOF} token, it will be used
+# as the EOF token for every call to {@link #nextToken} after the end of the
+# list is reached. Otherwise, an EOF token will be created.</p>
+#
+from .CommonTokenFactory import CommonTokenFactory
+from .Lexer import TokenSource
+from .Token import Token
+
+
+class ListTokenSource(TokenSource):
+    __slots__ = ('tokens', 'sourceName', 'pos', 'eofToken', '_factory')
+
+    # Constructs a new {@link ListTokenSource} instance from the specified
+    # collection of {@link Token} objects and source name.
+    #
+    # @param tokens The collection of {@link Token} objects to provide as a
+    # {@link TokenSource}.
+    # @param sourceName The name of the {@link TokenSource}. If this value is
+    # {@code null}, {@link #getSourceName} will attempt to infer the name from
+    # the next {@link Token} (or the previous token if the end of the input has
+    # been reached).
+    #
+    # @exception NullPointerException if {@code tokens} is {@code null}
+    #
+    def __init__(self, tokens:list, sourceName:str=None):
+        if tokens is None:
+            raise ReferenceError("tokens cannot be null")
+        self.tokens = tokens
+        self.sourceName = sourceName
+        # The index into {@link #tokens} of token to return by the next call to
+        # {@link #nextToken}. The end of the input is indicated by this value
+        # being greater than or equal to the number of items in {@link #tokens}.
+        self.pos = 0
+        # This field caches the EOF token for the token source.
+        self.eofToken = None
+        # This is the backing field for {@link #getTokenFactory} and
+        self._factory = CommonTokenFactory.DEFAULT
+
+
+    #
+    # {@inheritDoc}
+    #
+    @property
+    def column(self):
+        if self.pos < len(self.tokens):
+            return self.tokens[self.pos].column
+        elif self.eofToken is not None:
+            return self.eofToken.column
+        elif len(self.tokens) > 0:
+            # have to calculate the result from the line/column of the previous
+            # token, along with the text of the token.
+            lastToken = self.tokens[len(self.tokens) - 1]
+            tokenText = lastToken.text
+            if tokenText is not None:
+                lastNewLine = tokenText.rfind('\n')
+                if lastNewLine >= 0:
+                    return len(tokenText) - lastNewLine - 1
+            return lastToken.column + lastToken.stop - lastToken.start + 1
+
+        # only reach this if tokens is empty, meaning EOF occurs at the first
+        # position in the input
+        return 0
+
+    #
+    # {@inheritDoc}
+    #
+    def nextToken(self):
+        if self.pos >= len(self.tokens):
+            if self.eofToken is None:
+                start = -1
+                if len(self.tokens) > 0:
+                    previousStop = self.tokens[len(self.tokens) - 1].stop
+                    if previousStop != -1:
+                        start = previousStop + 1
+                stop = max(-1, start - 1)
+                self.eofToken = self._factory.create((self, self.getInputStream()),
+                        Token.EOF, "EOF", Token.DEFAULT_CHANNEL, start, stop, self.line, self.column)
+            return self.eofToken
+        t = self.tokens[self.pos]
+        if self.pos == len(self.tokens) - 1 and t.type == Token.EOF:
+            self.eofToken = t
+        self.pos += 1
+        return t
+
+    #
+    # {@inheritDoc}
+    #
+    @property
+    def line(self):
+        if self.pos < len(self.tokens):
+            return self.tokens[self.pos].line
+        elif self.eofToken is not None:
+            return self.eofToken.line
+        elif len(self.tokens) > 0:
+            # have to calculate the result from the line/column of the previous
+            # token, along with the text of the token.
+            lastToken = self.tokens[len(self.tokens) - 1]
+            line = lastToken.line
+            tokenText = lastToken.text
+            if tokenText is not None:
+                line += tokenText.count('\n')
+
+            # if no text is available, assume the token did not contain any newline characters.
+            return line
+
+        # only reach this if tokens is empty, meaning EOF occurs at the first
+        # position in the input
+        return 1
+
+    #
+    # {@inheritDoc}
+    #
+    def getInputStream(self):
+        if self.pos < len(self.tokens):
+            return self.tokens[self.pos].getInputStream()
+        elif self.eofToken is not None:
+            return self.eofToken.getInputStream()
+        elif len(self.tokens) > 0:
+            return self.tokens[len(self.tokens) - 1].getInputStream()
+        else:
+            # no input stream information is available
+            return None
+
+    #
+    # {@inheritDoc}
+    #
+    def getSourceName(self):
+        if self.sourceName is not None:
+            return self.sourceName
+        inputStream = self.getInputStream()
+        if inputStream is not None:
+            return inputStream.getSourceName()
+        else:
+            return "List"
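For orientation, ListTokenSource wraps an already-built list of tokens so it can stand in for a lexer as a TokenSource. The sketch below is illustrative only and not part of the package contents; it assumes the vendored modules import under omlish.antlr._runtime exactly as laid out in the file list above and that their API mirrors the upstream antlr4 runtime, and the token types, texts, and offsets are made-up placeholder values.

from omlish.antlr._runtime.CommonTokenFactory import CommonTokenFactory
from omlish.antlr._runtime.ListTokenSource import ListTokenSource
from omlish.antlr._runtime.Token import Token

factory = CommonTokenFactory.DEFAULT

# Hand-build two tokens; the (None, None) source pair, token type, and the
# start/stop/line/column values are placeholders for illustration only.
tokens = [
    factory.create((None, None), Token.MIN_USER_TOKEN_TYPE, 'hello', Token.DEFAULT_CHANNEL, 0, 4, 1, 0),
    factory.create((None, None), Token.MIN_USER_TOKEN_TYPE, 'world', Token.DEFAULT_CHANNEL, 6, 10, 1, 6),
]

source = ListTokenSource(tokens, sourceName='example')

# Drain the source: once the list is exhausted, nextToken() keeps returning a
# synthesized EOF token, matching the behavior described in the class comments.
while True:
    t = source.nextToken()
    print(t.type, t.text)
    if t.type == Token.EOF:
        break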