ccxt 4.3.70__py2.py3-none-any.whl → 4.3.72__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ccxt has been flagged as potentially problematic; see the advisory details accompanying this release for more information.
- ccxt/__init__.py +3 -1
- ccxt/abstract/paradex.py +40 -0
- ccxt/async_support/__init__.py +3 -1
- ccxt/async_support/base/exchange.py +1 -1
- ccxt/async_support/paradex.py +1966 -0
- ccxt/async_support/woo.py +4 -2
- ccxt/base/exchange.py +58 -1
- ccxt/paradex.py +1966 -0
- ccxt/pro/__init__.py +3 -1
- ccxt/pro/bequant.py +4 -0
- ccxt/pro/paradex.py +340 -0
- ccxt/static_dependencies/__init__.py +1 -1
- ccxt/static_dependencies/lark/__init__.py +38 -0
- ccxt/static_dependencies/lark/__pyinstaller/__init__.py +6 -0
- ccxt/static_dependencies/lark/__pyinstaller/hook-lark.py +14 -0
- ccxt/static_dependencies/lark/ast_utils.py +59 -0
- ccxt/static_dependencies/lark/common.py +86 -0
- ccxt/static_dependencies/lark/exceptions.py +292 -0
- ccxt/static_dependencies/lark/grammar.py +130 -0
- ccxt/static_dependencies/lark/grammars/__init__.py +0 -0
- ccxt/static_dependencies/lark/indenter.py +143 -0
- ccxt/static_dependencies/lark/lark.py +658 -0
- ccxt/static_dependencies/lark/lexer.py +678 -0
- ccxt/static_dependencies/lark/load_grammar.py +1428 -0
- ccxt/static_dependencies/lark/parse_tree_builder.py +391 -0
- ccxt/static_dependencies/lark/parser_frontends.py +257 -0
- ccxt/static_dependencies/lark/parsers/__init__.py +0 -0
- ccxt/static_dependencies/lark/parsers/cyk.py +340 -0
- ccxt/static_dependencies/lark/parsers/earley.py +314 -0
- ccxt/static_dependencies/lark/parsers/earley_common.py +42 -0
- ccxt/static_dependencies/lark/parsers/earley_forest.py +801 -0
- ccxt/static_dependencies/lark/parsers/grammar_analysis.py +203 -0
- ccxt/static_dependencies/lark/parsers/lalr_analysis.py +332 -0
- ccxt/static_dependencies/lark/parsers/lalr_interactive_parser.py +158 -0
- ccxt/static_dependencies/lark/parsers/lalr_parser.py +122 -0
- ccxt/static_dependencies/lark/parsers/lalr_parser_state.py +110 -0
- ccxt/static_dependencies/lark/parsers/xearley.py +165 -0
- ccxt/static_dependencies/lark/py.typed +0 -0
- ccxt/static_dependencies/lark/reconstruct.py +107 -0
- ccxt/static_dependencies/lark/tools/__init__.py +70 -0
- ccxt/static_dependencies/lark/tools/nearley.py +202 -0
- ccxt/static_dependencies/lark/tools/serialize.py +32 -0
- ccxt/static_dependencies/lark/tools/standalone.py +196 -0
- ccxt/static_dependencies/lark/tree.py +267 -0
- ccxt/static_dependencies/lark/tree_matcher.py +186 -0
- ccxt/static_dependencies/lark/tree_templates.py +180 -0
- ccxt/static_dependencies/lark/utils.py +343 -0
- ccxt/static_dependencies/lark/visitors.py +596 -0
- ccxt/static_dependencies/marshmallow/__init__.py +81 -0
- ccxt/static_dependencies/marshmallow/base.py +65 -0
- ccxt/static_dependencies/marshmallow/class_registry.py +94 -0
- ccxt/static_dependencies/marshmallow/decorators.py +231 -0
- ccxt/static_dependencies/marshmallow/error_store.py +60 -0
- ccxt/static_dependencies/marshmallow/exceptions.py +71 -0
- ccxt/static_dependencies/marshmallow/fields.py +2114 -0
- ccxt/static_dependencies/marshmallow/orderedset.py +89 -0
- ccxt/static_dependencies/marshmallow/py.typed +0 -0
- ccxt/static_dependencies/marshmallow/schema.py +1228 -0
- ccxt/static_dependencies/marshmallow/types.py +12 -0
- ccxt/static_dependencies/marshmallow/utils.py +378 -0
- ccxt/static_dependencies/marshmallow/validate.py +678 -0
- ccxt/static_dependencies/marshmallow/warnings.py +2 -0
- ccxt/static_dependencies/marshmallow_dataclass/__init__.py +1047 -0
- ccxt/static_dependencies/marshmallow_dataclass/collection_field.py +51 -0
- ccxt/static_dependencies/marshmallow_dataclass/lazy_class_attribute.py +45 -0
- ccxt/static_dependencies/marshmallow_dataclass/mypy.py +71 -0
- ccxt/static_dependencies/marshmallow_dataclass/py.typed +0 -0
- ccxt/static_dependencies/marshmallow_dataclass/typing.py +14 -0
- ccxt/static_dependencies/marshmallow_dataclass/union_field.py +82 -0
- ccxt/static_dependencies/marshmallow_oneofschema/__init__.py +1 -0
- ccxt/static_dependencies/marshmallow_oneofschema/one_of_schema.py +193 -0
- ccxt/static_dependencies/marshmallow_oneofschema/py.typed +0 -0
- ccxt/static_dependencies/starknet/__init__.py +0 -0
- ccxt/static_dependencies/starknet/cairo/__init__.py +0 -0
- ccxt/static_dependencies/starknet/cairo/data_types.py +123 -0
- ccxt/static_dependencies/starknet/cairo/deprecated_parse/__init__.py +0 -0
- ccxt/static_dependencies/starknet/cairo/deprecated_parse/cairo_types.py +77 -0
- ccxt/static_dependencies/starknet/cairo/deprecated_parse/parser.py +46 -0
- ccxt/static_dependencies/starknet/cairo/deprecated_parse/parser_transformer.py +138 -0
- ccxt/static_dependencies/starknet/cairo/felt.py +64 -0
- ccxt/static_dependencies/starknet/cairo/type_parser.py +121 -0
- ccxt/static_dependencies/starknet/cairo/v1/__init__.py +0 -0
- ccxt/static_dependencies/starknet/cairo/v1/type_parser.py +59 -0
- ccxt/static_dependencies/starknet/cairo/v2/__init__.py +0 -0
- ccxt/static_dependencies/starknet/cairo/v2/type_parser.py +77 -0
- ccxt/static_dependencies/starknet/ccxt_utils.py +7 -0
- ccxt/static_dependencies/starknet/common.py +15 -0
- ccxt/static_dependencies/starknet/constants.py +39 -0
- ccxt/static_dependencies/starknet/hash/__init__.py +0 -0
- ccxt/static_dependencies/starknet/hash/address.py +79 -0
- ccxt/static_dependencies/starknet/hash/compiled_class_hash_objects.py +111 -0
- ccxt/static_dependencies/starknet/hash/selector.py +16 -0
- ccxt/static_dependencies/starknet/hash/storage.py +12 -0
- ccxt/static_dependencies/starknet/hash/utils.py +78 -0
- ccxt/static_dependencies/starknet/models/__init__.py +0 -0
- ccxt/static_dependencies/starknet/models/typed_data.py +45 -0
- ccxt/static_dependencies/starknet/serialization/__init__.py +24 -0
- ccxt/static_dependencies/starknet/serialization/_calldata_reader.py +40 -0
- ccxt/static_dependencies/starknet/serialization/_context.py +142 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/__init__.py +10 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/_common.py +82 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/array_serializer.py +43 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/bool_serializer.py +37 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/byte_array_serializer.py +66 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/cairo_data_serializer.py +71 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/enum_serializer.py +71 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/felt_serializer.py +50 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/named_tuple_serializer.py +58 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/option_serializer.py +43 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/output_serializer.py +40 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/payload_serializer.py +72 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/struct_serializer.py +36 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/tuple_serializer.py +36 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/uint256_serializer.py +76 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/uint_serializer.py +100 -0
- ccxt/static_dependencies/starknet/serialization/data_serializers/unit_serializer.py +32 -0
- ccxt/static_dependencies/starknet/serialization/errors.py +10 -0
- ccxt/static_dependencies/starknet/serialization/factory.py +229 -0
- ccxt/static_dependencies/starknet/serialization/function_serialization_adapter.py +110 -0
- ccxt/static_dependencies/starknet/serialization/tuple_dataclass.py +59 -0
- ccxt/static_dependencies/starknet/utils/__init__.py +0 -0
- ccxt/static_dependencies/starknet/utils/constructor_args_translator.py +86 -0
- ccxt/static_dependencies/starknet/utils/iterable.py +13 -0
- ccxt/static_dependencies/starknet/utils/schema.py +13 -0
- ccxt/static_dependencies/starknet/utils/typed_data.py +182 -0
- ccxt/static_dependencies/starkware/__init__.py +0 -0
- ccxt/static_dependencies/starkware/crypto/__init__.py +0 -0
- ccxt/static_dependencies/starkware/crypto/fast_pedersen_hash.py +50 -0
- ccxt/static_dependencies/starkware/crypto/math_utils.py +78 -0
- ccxt/static_dependencies/starkware/crypto/signature.py +2344 -0
- ccxt/static_dependencies/starkware/crypto/utils.py +58 -0
- ccxt/static_dependencies/sympy/__init__.py +0 -0
- ccxt/static_dependencies/sympy/core/__init__.py +0 -0
- ccxt/static_dependencies/sympy/core/intfunc.py +35 -0
- ccxt/static_dependencies/sympy/external/__init__.py +0 -0
- ccxt/static_dependencies/sympy/external/gmpy.py +345 -0
- ccxt/static_dependencies/sympy/external/importtools.py +187 -0
- ccxt/static_dependencies/sympy/external/ntheory.py +637 -0
- ccxt/static_dependencies/sympy/external/pythonmpq.py +341 -0
- ccxt/static_dependencies/typing_extensions/__init__.py +0 -0
- ccxt/static_dependencies/typing_extensions/typing_extensions.py +3839 -0
- ccxt/static_dependencies/typing_inspect/__init__.py +0 -0
- ccxt/static_dependencies/typing_inspect/typing_inspect.py +851 -0
- ccxt/test/tests_async.py +43 -1
- ccxt/test/tests_sync.py +43 -1
- ccxt/woo.py +4 -2
- {ccxt-4.3.70.dist-info → ccxt-4.3.72.dist-info}/METADATA +7 -6
- {ccxt-4.3.70.dist-info → ccxt-4.3.72.dist-info}/RECORD +151 -16
- {ccxt-4.3.70.dist-info → ccxt-4.3.72.dist-info}/LICENSE.txt +0 -0
- {ccxt-4.3.70.dist-info → ccxt-4.3.72.dist-info}/WHEEL +0 -0
- {ccxt-4.3.70.dist-info → ccxt-4.3.72.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,292 @@
|
|
1
|
+
from .utils import logger, NO_VALUE
|
2
|
+
from typing import Mapping, Iterable, Callable, Union, TypeVar, Tuple, Any, List, Set, Optional, Collection, TYPE_CHECKING
|
3
|
+
|
4
|
+
if TYPE_CHECKING:
|
5
|
+
from .lexer import Token
|
6
|
+
from .parsers.lalr_interactive_parser import InteractiveParser
|
7
|
+
from .tree import Tree
|
8
|
+
|
9
|
+
###{standalone
|
10
|
+
|
11
|
+
class LarkError(Exception):
    """Base exception for all errors raised by this library."""
    pass
|
13
|
+
|
14
|
+
|
15
|
+
class ConfigurationError(LarkError, ValueError):
    """Raised when an invalid configuration value is supplied (see ``assert_config``)."""
    pass
|
17
|
+
|
18
|
+
|
19
|
+
def assert_config(value, options: Collection, msg='Got %r, expected one of %s'):
    """Validate that *value* is one of *options*.

    Raises:
        ConfigurationError: if *value* is not contained in *options*.
    """
    if value in options:
        return
    raise ConfigurationError(msg % (value, options))
|
22
|
+
|
23
|
+
|
24
|
+
class GrammarError(LarkError):
    """Raised when there is a problem with the grammar definition itself."""
    pass
|
26
|
+
|
27
|
+
|
28
|
+
class ParseError(LarkError):
    """Base class for errors raised while parsing."""
    pass
|
30
|
+
|
31
|
+
|
32
|
+
class LexError(LarkError):
    """Base class for errors raised while lexing."""
    pass
|
34
|
+
|
35
|
+
# Generic label type used by ``UnexpectedInput.match_examples`` (keys of the
# examples mapping are returned unchanged, so their type is preserved).
T = TypeVar('T')
|
36
|
+
|
37
|
+
class UnexpectedInput(LarkError):
    """UnexpectedInput Error.

    Used as a base class for the following exceptions:

    - ``UnexpectedCharacters``: The lexer encountered an unexpected string
    - ``UnexpectedToken``: The parser received an unexpected token
    - ``UnexpectedEOF``: The parser expected a token, but the input ended

    After catching one of these exceptions, you may call the following helper methods to create a nicer error message.
    """
    line: int
    column: int
    # Offset of the error in the input; subclasses overwrite this in __init__.
    pos_in_stream = None
    state: Any
    # Optional mapping of terminal name -> terminal definition (objects exposing
    # ``user_repr()``); used by _format_expected to pretty-print expected terminals.
    _terminals_by_name = None
    interactive_parser: 'InteractiveParser'

    def get_context(self, text: str, span: int=40) -> str:
        """Returns a pretty string pinpointing the error in the text,
        with span amount of context characters around it.

        Note:
            The parser doesn't hold a copy of the text it has to parse,
            so you have to provide it again
        """
        assert self.pos_in_stream is not None, self
        pos = self.pos_in_stream
        start = max(pos - span, 0)
        end = pos + span
        if not isinstance(text, bytes):
            # Trim the window at the nearest newline on each side so only the
            # offending line is shown, then point at the error with a caret.
            before = text[start:pos].rsplit('\n', 1)[-1]
            after = text[pos:end].split('\n', 1)[0]
            return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n'
        else:
            # Same logic for bytes input; decode leniently so the result is printable.
            before = text[start:pos].rsplit(b'\n', 1)[-1]
            after = text[pos:end].split(b'\n', 1)[0]
            return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace")

    def match_examples(self, parse_fn: 'Callable[[str], Tree]',
                       examples: Union[Mapping[T, Iterable[str]], Iterable[Tuple[T, Iterable[str]]]],
                       token_type_match_fallback: bool=False,
                       use_accepts: bool=True
                       ) -> Optional[T]:
        """Allows you to detect what's wrong in the input text by matching
        against example errors.

        Given a parser instance and a dictionary mapping some label with
        some malformed syntax examples, it'll return the label for the
        example that bests matches the current error. The function will
        iterate the dictionary until it finds a matching error, and
        return the corresponding value.

        For an example usage, see `examples/error_reporting_lalr.py`

        Parameters:
            parse_fn: parse function (usually ``lark_instance.parse``)
            examples: dictionary of ``{'example_string': value}``.
            use_accepts: Recommended to keep this as ``use_accepts=True``.
        """
        assert self.state is not None, "Not supported for this exception"

        if isinstance(examples, Mapping):
            examples = examples.items()

        # candidate is (label, matched_by_token_type): an exact token match
        # returns immediately; otherwise the best fallback found so far is kept.
        candidate = (None, False)
        for i, (label, example) in enumerate(examples):
            assert not isinstance(example, str), "Expecting a list"

            for j, malformed in enumerate(example):
                try:
                    parse_fn(malformed)
                except UnexpectedInput as ut:
                    if ut.state == self.state:
                        if (
                            use_accepts
                            and isinstance(self, UnexpectedToken)
                            and isinstance(ut, UnexpectedToken)
                            and ut.accepts != self.accepts
                        ):
                            # Same parser state but a different set of acceptable
                            # tokens: not a real match, keep searching.
                            logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" %
                                         (self.state, self.accepts, ut.accepts, i, j))
                            continue
                        if (
                            isinstance(self, (UnexpectedToken, UnexpectedEOF))
                            and isinstance(ut, (UnexpectedToken, UnexpectedEOF))
                        ):
                            if ut.token == self.token:  # Try exact match first
                                logger.debug("Exact Match at example [%s][%s]" % (i, j))
                                return label

                            if token_type_match_fallback:
                                # Fallback to token types match
                                if (ut.token.type == self.token.type) and not candidate[-1]:
                                    logger.debug("Token Type Fallback at example [%s][%s]" % (i, j))
                                    candidate = label, True

                        if candidate[0] is None:
                            # Weakest match: same parser state only.
                            logger.debug("Same State match at example [%s][%s]" % (i, j))
                            candidate = label, False

        return candidate[0]

    def _format_expected(self, expected):
        # Prefer the user-friendly terminal representation when terminal
        # definitions are available; fall back to the raw terminal name.
        if self._terminals_by_name:
            d = self._terminals_by_name
            expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected]
        return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected)
|
145
|
+
|
146
|
+
|
147
|
+
class UnexpectedEOF(ParseError, UnexpectedInput):
    """An exception that is raised by the parser, when the input ends while it still expects a token.
    """
    expected: 'List[Token]'

    def __init__(self, expected, state=None, terminals_by_name=None):
        super(UnexpectedEOF, self).__init__()

        self.expected = expected
        self.state = state
        # Imported locally to avoid a circular import with .lexer.
        from .lexer import Token
        # Synthesize an EOF token so code that inspects ``.token`` keeps working.
        self.token = Token("<EOF>", "")  # , line=-1, column=-1, pos_in_stream=-1)
        # Position info is meaningless at end-of-input; use -1 sentinels.
        self.pos_in_stream = -1
        self.line = -1
        self.column = -1
        self._terminals_by_name = terminals_by_name

    def __str__(self):
        message = "Unexpected end-of-input. "
        message += self._format_expected(self.expected)
        return message
|
169
|
+
|
170
|
+
|
171
|
+
class UnexpectedCharacters(LexError, UnexpectedInput):
    """An exception that is raised by the lexer, when it cannot match the next
    string of characters to any of its terminals.
    """

    allowed: Set[str]
    considered_tokens: Set[Any]

    def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None,
                 terminals_by_name=None, considered_rules=None):
        super(UnexpectedCharacters, self).__init__()

        # TODO considered_tokens and allowed can be figured out using state
        self.line = line
        self.column = column
        self.pos_in_stream = lex_pos
        self.state = state
        self._terminals_by_name = terminals_by_name

        self.allowed = allowed
        self.considered_tokens = considered_tokens
        self.considered_rules = considered_rules
        self.token_history = token_history

        if isinstance(seq, bytes):
            # Slice (rather than index) so we get a bytes object, then decode
            # leniently so unprintable bytes still render.
            self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace")
        else:
            self.char = seq[lex_pos]
        # Pre-render the context snippet while the input text is still at hand
        # (get_context requires the text to be provided).
        self._context = self.get_context(seq)

    def __str__(self):
        message = "No terminal matches '%s' in the current parser context, at line %d col %d" % (self.char, self.line, self.column)
        message += '\n\n' + self._context
        if self.allowed:
            message += self._format_expected(self.allowed)
        if self.token_history:
            message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history)
        return message
|
210
|
+
|
211
|
+
|
212
|
+
class UnexpectedToken(ParseError, UnexpectedInput):
    """An exception that is raised by the parser, when the token it received
    doesn't match any valid step forward.

    Parameters:
        token: The mismatched token
        expected: The set of expected tokens
        considered_rules: Which rules were considered, to deduce the expected tokens
        state: A value representing the parser state. Do not rely on its value or type.
        interactive_parser: An instance of ``InteractiveParser``, that is initialized to the point of failure,
                            and can be used for debugging and error handling.

    Note: These parameters are available as attributes of the instance.
    """

    expected: Set[str]
    considered_rules: Set[str]

    def __init__(self, token, expected, considered_rules=None, state=None, interactive_parser=None, terminals_by_name=None, token_history=None):
        super(UnexpectedToken, self).__init__()

        # TODO considered_rules and expected can be figured out using state
        # Tokens without position info fall back to '?' placeholders.
        self.line = getattr(token, 'line', '?')
        self.column = getattr(token, 'column', '?')
        self.pos_in_stream = getattr(token, 'start_pos', None)
        self.state = state

        self.token = token
        self.expected = expected  # XXX deprecate? `accepts` is better
        # Sentinel: ``accepts`` is computed lazily on first access.
        self._accepts = NO_VALUE
        self.considered_rules = considered_rules
        self.interactive_parser = interactive_parser
        self._terminals_by_name = terminals_by_name
        self.token_history = token_history

    @property
    def accepts(self) -> Set[str]:
        if self._accepts is NO_VALUE:
            # May evaluate to None when no interactive parser was provided.
            self._accepts = self.interactive_parser and self.interactive_parser.accepts()
        return self._accepts

    def __str__(self):
        # Prefer the lazily computed ``accepts`` set; fall back to ``expected``.
        message = ("Unexpected token %r at line %s, column %s.\n%s"
                   % (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected)))
        if self.token_history:
            message += "Previous tokens: %r\n" % self.token_history

        return message
|
261
|
+
|
262
|
+
|
263
|
+
|
264
|
+
class VisitError(LarkError):
    """Raised when a visitor or transformer is interrupted by an exception.

    Parameters:
        rule: the name of the visit rule that failed
        obj: the tree-node or token that was being processed
        orig_exc: the exception that cause it to fail

    Note: These parameters are available as attributes of the instance.
    """

    obj: 'Union[Tree, Token]'
    orig_exc: Exception

    def __init__(self, rule, obj, orig_exc):
        # Record the failure context first, then build the user-facing message.
        self.rule = rule
        self.obj = obj
        self.orig_exc = orig_exc
        super().__init__('Error trying to process rule "%s":\n\n%s' % (rule, orig_exc))
|
287
|
+
|
288
|
+
|
289
|
+
class MissingVariableError(LarkError):
    """Raised when a template variable referenced during reconstruction is missing."""
    pass
|
291
|
+
|
292
|
+
###}
|
@@ -0,0 +1,130 @@
|
|
1
|
+
from typing import Optional, Tuple, ClassVar, Sequence
|
2
|
+
|
3
|
+
from .utils import Serialize
|
4
|
+
|
5
|
+
###{standalone
|
6
|
+
# Default priority assigned to tokens/terminals when none is given explicitly.
# NOTE(review): consumers of this constant live in other modules — confirm there.
TOKEN_DEFAULT_PRIORITY = 0
|
7
|
+
|
8
|
+
|
9
|
+
class Symbol(Serialize):
    """Base class for grammar symbols, identified by name.

    Subclasses (``Terminal``, ``NonTerminal``) set ``is_term`` to distinguish
    the two kinds of symbol.
    """
    __slots__ = ('name',)

    name: str
    # Overridden by subclasses: True for Terminal, False for NonTerminal.
    is_term: ClassVar[bool] = NotImplemented

    def __init__(self, name: str) -> None:
        self.name = name

    def __eq__(self, other):
        assert isinstance(other, Symbol), other
        # Symbols are equal only if both the kind (terminal vs nonterminal)
        # and the name match.
        return self.is_term == other.is_term and self.name == other.name

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # Hash by name only; this is compatible with __eq__ (equal symbols
        # always share a name and therefore a hash).
        return hash(self.name)

    def __repr__(self):
        return '%s(%r)' % (type(self).__name__, self.name)

    # Subclasses may override fullrepr to show extra state (see Terminal).
    fullrepr = property(__repr__)

    def renamed(self, f):
        """Return a new symbol of the same type with the name mapped through ``f``."""
        return type(self)(f(self.name))
|
35
|
+
|
36
|
+
|
37
|
+
class Terminal(Symbol):
    """A terminal symbol (token type) in the grammar."""
    __serialize_fields__ = 'name', 'filter_out'

    is_term: ClassVar[bool] = True

    def __init__(self, name, filter_out=False):
        self.name = name
        # NOTE(review): presumably marks terminals to be dropped from the parse
        # tree — confirm against the tree-building code that consumes it.
        self.filter_out = filter_out

    @property
    def fullrepr(self):
        # Unlike the inherited repr, also shows the filter_out flag.
        return '%s(%r, %r)' % (type(self).__name__, self.name, self.filter_out)

    def renamed(self, f):
        """Return a renamed copy, preserving the ``filter_out`` flag."""
        return type(self)(f(self.name), self.filter_out)
|
52
|
+
|
53
|
+
|
54
|
+
class NonTerminal(Symbol):
    """A nonterminal symbol (rule name) in the grammar."""
    __serialize_fields__ = 'name',

    is_term: ClassVar[bool] = False
|
58
|
+
|
59
|
+
|
60
|
+
class RuleOptions(Serialize):
    """Per-rule options collected from the grammar definition."""
    __serialize_fields__ = 'keep_all_tokens', 'expand1', 'priority', 'template_source', 'empty_indices'

    keep_all_tokens: bool
    expand1: bool
    priority: Optional[int]
    template_source: Optional[str]
    empty_indices: Tuple[bool, ...]

    def __init__(self, keep_all_tokens: bool=False, expand1: bool=False, priority: Optional[int]=None, template_source: Optional[str]=None, empty_indices: Tuple[bool, ...]=()) -> None:
        self.keep_all_tokens = keep_all_tokens
        self.expand1 = expand1
        self.priority = priority
        self.template_source = template_source
        self.empty_indices = empty_indices

    def __repr__(self):
        # NOTE(review): shows only 4 of the 5 fields (empty_indices is omitted).
        return 'RuleOptions(%r, %r, %r, %r)' % (
            self.keep_all_tokens,
            self.expand1,
            self.priority,
            self.template_source
        )
|
83
|
+
|
84
|
+
|
85
|
+
class Rule(Serialize):
    """
    origin : a symbol
    expansion : a list of symbols
    order : index of this expansion amongst all rules of the same name
    """
    __slots__ = ('origin', 'expansion', 'alias', 'options', 'order', '_hash')

    __serialize_fields__ = 'origin', 'expansion', 'order', 'alias', 'options'
    __serialize_namespace__ = Terminal, NonTerminal, RuleOptions

    origin: NonTerminal
    expansion: Sequence[Symbol]
    order: int
    alias: Optional[str]
    options: RuleOptions
    # Cached hash of (origin, expansion); excluded from serialization.
    _hash: int

    def __init__(self, origin: NonTerminal, expansion: Sequence[Symbol],
                 order: int=0, alias: Optional[str]=None, options: Optional[RuleOptions]=None):
        self.origin = origin
        self.expansion = expansion
        self.alias = alias
        self.order = order
        self.options = options or RuleOptions()
        # Precompute the hash; expansion is converted to a tuple to be hashable.
        self._hash = hash((self.origin, tuple(self.expansion)))

    def _deserialize(self):
        # Recompute the cached hash after deserialization (_hash is not serialized).
        self._hash = hash((self.origin, tuple(self.expansion)))

    def __str__(self):
        return '<%s : %s>' % (self.origin.name, ' '.join(x.name for x in self.expansion))

    def __repr__(self):
        return 'Rule(%r, %r, %r, %r)' % (self.origin, self.expansion, self.alias, self.options)

    def __hash__(self):
        return self._hash

    def __eq__(self, other):
        if not isinstance(other, Rule):
            return False
        # Identity of a rule is origin + expansion only; alias, order and
        # options do not participate in equality (nor in the hash above).
        return self.origin == other.origin and self.expansion == other.expansion
|
128
|
+
|
129
|
+
|
130
|
+
###}
|
File without changes
|
@@ -0,0 +1,143 @@
|
|
1
|
+
"Provides a post-lexer for implementing Python-style indentation."
|
2
|
+
|
3
|
+
from abc import ABC, abstractmethod
|
4
|
+
from typing import List, Iterator
|
5
|
+
|
6
|
+
from .exceptions import LarkError
|
7
|
+
from .lark import PostLex
|
8
|
+
from .lexer import Token
|
9
|
+
|
10
|
+
###{standalone
|
11
|
+
|
12
|
+
class DedentError(LarkError):
    """Raised by ``Indenter`` when the input dedents to an indentation level
    that was never opened (see ``Indenter.handle_NL``)."""
    pass
|
14
|
+
|
15
|
+
class Indenter(PostLex, ABC):
    """This is a postlexer that "injects" indent/dedent tokens based on indentation.

    It keeps track of the current indentation, as well as the current level of parentheses.
    Inside parentheses, the indentation is ignored, and no indent/dedent tokens get generated.

    Note: This is an abstract class. To use it, inherit and implement all its abstract methods:
        - tab_len
        - NL_type
        - OPEN_PAREN_types, CLOSE_PAREN_types
        - INDENT_type, DEDENT_type

    See also: the ``postlex`` option in `Lark`.
    """
    # Current nesting depth of open parentheses/brackets/braces.
    paren_level: int
    # Stack of active indentation widths; the bottom element is always 0.
    indent_level: List[int]

    def __init__(self) -> None:
        self.paren_level = 0
        self.indent_level = [0]
        assert self.tab_len > 0

    def handle_NL(self, token: Token) -> Iterator[Token]:
        """Yield the newline token, followed by any INDENT/DEDENT tokens implied
        by the indentation that follows it."""
        # Inside brackets, newlines do not affect indentation.
        if self.paren_level > 0:
            return

        yield token

        indent_str = token.rsplit('\n', 1)[1]  # Tabs and spaces
        # Each tab counts as tab_len spaces.
        indent = indent_str.count(' ') + indent_str.count('\t') * self.tab_len

        if indent > self.indent_level[-1]:
            self.indent_level.append(indent)
            yield Token.new_borrow_pos(self.INDENT_type, indent_str, token)
        else:
            # Emit one DEDENT per indentation level popped.
            while indent < self.indent_level[-1]:
                self.indent_level.pop()
                yield Token.new_borrow_pos(self.DEDENT_type, indent_str, token)

            if indent != self.indent_level[-1]:
                # Dedented to a width that was never pushed on the stack.
                raise DedentError('Unexpected dedent to column %s. Expected dedent to %s' % (indent, self.indent_level[-1]))

    def _process(self, stream):
        for token in stream:
            if token.type == self.NL_type:
                yield from self.handle_NL(token)
            else:
                yield token

            # Track bracket nesting so handle_NL can ignore newlines inside it.
            if token.type in self.OPEN_PAREN_types:
                self.paren_level += 1
            elif token.type in self.CLOSE_PAREN_types:
                self.paren_level -= 1
                assert self.paren_level >= 0

        # At end of input, close any indentation still open.
        while len(self.indent_level) > 1:
            self.indent_level.pop()
            yield Token(self.DEDENT_type, '')

        assert self.indent_level == [0], self.indent_level

    def process(self, stream):
        # Reset state so the same instance can be reused for multiple inputs.
        self.paren_level = 0
        self.indent_level = [0]
        return self._process(stream)

    # XXX Hack for ContextualLexer. Maybe there's a more elegant solution?
    @property
    def always_accept(self):
        return (self.NL_type,)

    @property
    @abstractmethod
    def NL_type(self) -> str:
        "The name of the newline token"
        raise NotImplementedError()

    @property
    @abstractmethod
    def OPEN_PAREN_types(self) -> List[str]:
        "The names of the tokens that open a parenthesis"
        raise NotImplementedError()

    @property
    @abstractmethod
    def CLOSE_PAREN_types(self) -> List[str]:
        """The names of the tokens that close a parenthesis
        """
        raise NotImplementedError()

    @property
    @abstractmethod
    def INDENT_type(self) -> str:
        """The name of the token that starts an indentation in the grammar.

        See also: %declare
        """
        raise NotImplementedError()

    @property
    @abstractmethod
    def DEDENT_type(self) -> str:
        """The name of the token that end an indentation in the grammar.

        See also: %declare
        """
        raise NotImplementedError()

    @property
    @abstractmethod
    def tab_len(self) -> int:
        """How many spaces does a tab equal"""
        raise NotImplementedError()
|
128
|
+
|
129
|
+
|
130
|
+
class PythonIndenter(Indenter):
    """A postlexer that "injects" _INDENT/_DEDENT tokens based on indentation, according to the Python syntax.

    See also: the ``postlex`` option in `Lark`.
    """

    NL_type = '_NEWLINE'
    OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE']
    CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE']
    INDENT_type = '_INDENT'
    DEDENT_type = '_DEDENT'
    # Python's tokenizer convention: a tab advances to the next multiple of 8.
    tab_len = 8
|
142
|
+
|
143
|
+
###}
|