omextra 0.0.0.dev495__tar.gz → 0.0.0.dev497__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {omextra-0.0.0.dev495/omextra.egg-info → omextra-0.0.0.dev497}/PKG-INFO +2 -2
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/__init__.py +8 -6
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/base.py +39 -2
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/internal.py +1 -1
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/meta.py +13 -2
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/ops.py +10 -5
- omextra-0.0.0.dev497/omextra/text/abnf/opto.py +154 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/parsing.py +81 -15
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497/omextra.egg-info}/PKG-INFO +2 -2
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra.egg-info/SOURCES.txt +1 -0
- omextra-0.0.0.dev497/omextra.egg-info/requires.txt +1 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/pyproject.toml +2 -2
- omextra-0.0.0.dev495/omextra.egg-info/requires.txt +0 -1
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/LICENSE +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/MANIFEST.in +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/README.md +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/.omlish-manifests.json +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/README.md +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/__about__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/LICENSE +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/all.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/api.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/core.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/events.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/files.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/runner.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bluelet/sockets.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/asyncs/bridge.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/collections/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/collections/hamt/LICENSE +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/collections/hamt/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/collections/hamt/_hamt.c +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/defs.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/dynamic.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/goyaml/LICENSE +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/goyaml/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/goyaml/ast.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/goyaml/errors.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/goyaml/parsing.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/goyaml/scanning.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/goyaml/tokens.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json/Json.g4 +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json/_antlr/JsonLexer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json/_antlr/JsonListener.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json/_antlr/JsonParser.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json/_antlr/JsonVisitor.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json/_antlr/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/Json5.g4 +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/_antlr/Json5Lexer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/_antlr/Json5Listener.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/_antlr/Json5Parser.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/_antlr/Json5Visitor.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/_antlr/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/formats/json5/parsing.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/io/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/io/trampoline.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/Protobuf3.g4 +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/_antlr/Protobuf3Lexer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/_antlr/Protobuf3Listener.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/_antlr/Protobuf3Parser.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/_antlr/Protobuf3Visitor.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/_antlr/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/nodes.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/specs/proto/parsing.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/Minisql.g4 +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/_antlr/MinisqlLexer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/_antlr/MinisqlListener.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/_antlr/MinisqlParser.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/_antlr/MinisqlVisitor.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/_antlr/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/sql/parsing/parsing.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/LICENSE +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/_dataclasses.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/core.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/docs/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/docs/rfc5234.txt +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/docs/rfc7405.txt +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/errors.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/utils.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/visitors.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/BufferedTokenStream.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/CommonTokenFactory.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/CommonTokenStream.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/FileStream.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/InputStream.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/IntervalSet.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/LICENSE.txt +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/LL1Analyzer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/Lexer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/ListTokenSource.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/Parser.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/ParserInterpreter.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/ParserRuleContext.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/PredictionContext.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/Recognizer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/RuleContext.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/StdinStream.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/Token.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/TokenStreamRewriter.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/Utils.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/_all.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/_pygrun.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATN.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATNConfig.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATNConfigSet.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATNDeserializationOptions.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATNDeserializer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATNSimulator.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATNState.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ATNType.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/LexerATNSimulator.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/LexerAction.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/LexerActionExecutor.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/ParserATNSimulator.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/PredictionMode.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/SemanticContext.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/Transition.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/atn/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/dfa/DFA.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/dfa/DFASerializer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/dfa/DFAState.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/dfa/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/error/DiagnosticErrorListener.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/error/ErrorListener.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/error/ErrorStrategy.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/error/Errors.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/error/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/Chunk.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/ParseTreeMatch.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/ParseTreePattern.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/ParseTreePatternMatcher.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/RuleTagToken.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/TokenTagToken.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/Tree.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/Trees.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/tree/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/xpath/XPath.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/xpath/XPathLexer.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/_runtime/xpath/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/cli/__init__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/cli/__main__.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/cli/cli.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/cli/consts.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/cli/gen.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/delimit.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/dot.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/errors.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/input.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/parsing.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/runtime.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/antlr/utils.py +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra.egg-info/dependency_links.txt +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra.egg-info/entry_points.txt +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra.egg-info/top_level.txt +0 -0
- {omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/setup.cfg +0 -0

{omextra-0.0.0.dev495/omextra.egg-info → omextra-0.0.0.dev497}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omextra
-Version: 0.0.0.dev495
+Version: 0.0.0.dev497
 Summary: omextra
 Author: wrmsr
 License-Expression: BSD-3-Clause
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: omlish==0.0.0.dev495
+Requires-Dist: omlish==0.0.0.dev497
 Dynamic: license-file
 
 # Overview

{omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/__init__.py

@@ -5,20 +5,18 @@ Originally based on library by Charles Yeomans (see LICENSE file):
 
 https://github.com/declaresub/abnf/tree/561ced67c0a8afc869ad0de5b39dbe4f6e71b0d8/src/abnf
 
-It has however been
+It has however been entirely rewritten.
 
 ====
 
 TODO:
- -
- - get greedier
- - match-powered optimizer
- - greedily compile regexes
+ - opto
  - error reporting
- - codegen
+ - codegen?
  - fix_ws problem
  - auto? no, need to keep lines / offsets accurate for errors
  - relax CRLF rule by default?
+ - grammar transform? helper kwarg?
 """
 from omlish import dataclasses as _dc  # noqa
 
@@ -82,6 +80,10 @@ from .ops import (  # noqa
     rule,
 )
 
+from .opto import (  # noqa
+    optimize_op,
+)
+
 from .utils import (  # noqa
     strip_insignificant_match_rules,
     only_match_rules,

{omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/base.py

@@ -1,3 +1,4 @@
+import abc
 import io
 import itertools
 import typing as ta
@@ -10,6 +11,7 @@ from .errors import AbnfIncompleteParseError
 
 
 with lang.auto_proxy_import(globals()):
+    from . import internal
     from . import ops
     from . import parsing
 
@@ -53,6 +55,8 @@ class Match(ta.NamedTuple):
             write(f'literal<{self.start}-{self.end}>({o.value!r})')
         elif isinstance(o, ops.RangeLiteral):
             write(f'literal<{self.start}-{self.end}>({o.value.lo!r}-{o.value.hi!r})')
+        elif isinstance(o, internal.Regex):
+            write(f'regex<{self.start}-{self.end}>({o.pat.pattern!r})')
         else:
             write(f'{o.__class__.__name__.lower()}<{self.start}-{self.end}>')
         if isinstance(o, ops.RuleRef):
@@ -110,8 +114,28 @@ class Op(lang.Abstract, lang.PackageSealed):
         return f'{self.__class__.__name__}@{id(self):x}'
 
 
+class CompositeOp(Op, lang.Abstract):
+    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
+        super().__init_subclass__(**kwargs)
+
+        try:
+            leaf_op_cls = LeafOp
+        except NameError:
+            pass
+        else:
+            check.not_issubclass(cls, leaf_op_cls)
+
+    @property
+    @abc.abstractmethod
+    def children(self) -> ta.Sequence[Op]:
+        raise NotImplementedError
+
+
 class LeafOp(Op, lang.Abstract):
-
+    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
+        super().__init_subclass__(**kwargs)
+
+        check.not_issubclass(cls, CompositeOp)
 
 
 ##
@@ -128,13 +152,21 @@ class Rule(lang.Final):
         super().__init__()
 
         self._name = check.non_empty_str(name)
-        self._name_f = name.casefold()
         self._op = check.isinstance(op, Op)
         self._insignificant = insignificant
 
+        self._name_f = name.casefold()
+
     def __repr__(self) -> str:
         return f'{self.__class__.__name__}({self._name!r})'
 
+    def replace_op(self, op: Op) -> 'Rule':
+        return Rule(
+            self._name,
+            op,
+            insignificant=self._insignificant,
+        )
+
     @property
     def name(self) -> str:
         return self._name
@@ -165,6 +197,7 @@ class Grammar(lang.Final):
         rules_by_name_f: dict[str, Rule] = {}
         rules_by_op: dict[Op, Rule] = {}
         for gr in rules:
+            check.isinstance(gr, Rule)
             check.not_in(gr, rules_set)
             check.not_in(gr._name, rules_by_name)  # noqa
             check.not_in(gr._name_f, rules_by_name_f)  # noqa
@@ -196,6 +229,7 @@ class Grammar(lang.Final):
         *,
         start: int = 0,
         debug: int = 0,
+        **kwargs: ta.Any,
     ) -> ta.Iterator[Match]:
         if root is None:
             if (root := self._root) is None:
@@ -212,6 +246,7 @@ class Grammar(lang.Final):
             root._op,  # noqa
             start,
             debug=debug,
+            **kwargs,
         )
 
     def parse(
@@ -222,12 +257,14 @@ class Grammar(lang.Final):
         start: int = 0,
         complete: bool = False,
         debug: int = 0,
+        **kwargs: ta.Any,
     ) -> Match | None:
         if (match := longest_match(self.iter_parse(
             source,
             root,
             start=start,
             debug=debug,
+            **kwargs,
         ))) is None:
             return None
 
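
The CompositeOp/LeafOp split above gives every op tree a uniform children property (Repeat returns its single child as a one-tuple; Concat and Either expose their child tuples, which the new opto.py below relies on). A small traversal sketch, assuming only that CompositeOp and Op are importable from the base module shown in this hunk:

import typing as ta

from omextra.text.abnf.base import CompositeOp
from omextra.text.abnf.base import Op


def iter_ops(op: Op) -> ta.Iterator[Op]:
    # Depth-first walk over an op tree: composites recurse into .children, leaf
    # ops (literals, rule refs, regexes) terminate the recursion. This mirrors
    # the analyze_tree helper added in opto.py further down.
    yield op
    if isinstance(op, CompositeOp):
        for child in op.children:
            yield from iter_ops(child)
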
{omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/meta.py

@@ -20,6 +20,7 @@ from .ops import literal
 from .ops import option
 from .ops import repeat
 from .ops import rule
+from .opto import optimize_op
 from .utils import fix_ws
 from .utils import parse_rules
 from .visitors import RuleMatchVisitor
@@ -558,8 +559,9 @@ class MetaGrammarRuleMatchVisitor(RuleMatchVisitor[ta.Any]):
 def parse_grammar(
     source: str,
     *,
-    no_core_rules: bool = False,
     root: str | None = None,
+    no_core_rules: bool = False,
+    no_optimize: bool = False,
     **kwargs: ta.Any,
 ) -> Grammar:
     source = fix_ws(source)
@@ -575,7 +577,16 @@ def parse_grammar(
     check.isinstance(mg_m.op, Repeat)
 
     mg_rmv = MetaGrammarRuleMatchVisitor(source)
-    rules = [
+    rules = [
+        check.isinstance(mg_rmv.visit_match(gg_cm), Rule)
+        for gg_cm in mg_m.children
+    ]
+
+    if not no_optimize:
+        rules = [
+            r.replace_op(optimize_op(r.op))
+            for r in rules
+        ]
 
     return Grammar(
         *rules,
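
The new no_optimize flag controls whether parse_grammar runs the optimize_op pass over each parsed rule's op tree. A hedged usage sketch; the top-level import path and the ABNF snippet are assumptions, not taken from this diff:

from omextra.text.abnf import parse_grammar

abnf_src = 'greeting = "hello" *( SP word )\r\nword = 1*ALPHA\r\n'  # hypothetical grammar

g_opt = parse_grammar(abnf_src, root='greeting')                    # rules optimized (default)
g_raw = parse_grammar(abnf_src, root='greeting', no_optimize=True)  # keep the raw op trees
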
{omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/ops.py

@@ -4,6 +4,7 @@ from omlish import check
 from omlish import dataclasses as dc
 from omlish import lang
 
+from .base import CompositeOp
 from .base import LeafOp
 from .base import Op
 
@@ -54,8 +55,8 @@ class RangeLiteral(Literal, lang.Final):
        hi: str
 
        def __post_init__(self) -> None:
-
-
+           for c in (self.lo, self.hi):
+               check.equal(len(check.non_empty_str(c)), 1)
            check.state(self.hi >= self.lo)
 
    def __init__(self, value: Range) -> None:
@@ -101,7 +102,7 @@ def literal(*args, case_sensitive=None):
 
 
 @ta.final
-class Concat(
+class Concat(CompositeOp, lang.Final):
     def __init__(self, *children: Op) -> None:
         super().__init__()
 
@@ -124,7 +125,7 @@ concat = Concat
 
 
 @ta.final
-class Repeat(
+class Repeat(CompositeOp, lang.Final):
     @dc.dataclass(frozen=True)
     class Times:
         min: int = 0
@@ -159,6 +160,10 @@ class Repeat(Op, lang.Final):
     def child(self) -> Op:
         return self._child
 
+    @property
+    def children(self) -> ta.Sequence[Op]:
+        return (self._child,)
+
     def __repr__(self) -> str:
         return f'{self.__class__.__name__}@{id(self):x}({self._times}, {self._child!r})'
 
@@ -223,7 +228,7 @@ def option(child: Op) -> Repeat:
 
 
 @ta.final
-class Either(
+class Either(CompositeOp, lang.Final):
     def __init__(self, *children: Op, first_match: bool = False) -> None:
         super().__init__()
 
omextra-0.0.0.dev497/omextra/text/abnf/opto.py

@@ -0,0 +1,154 @@
+"""
+TODO:
+ - Merge concat
+ - Merge concatted literals
+ - Regex
+"""
+import re
+import typing as ta
+
+from omlish import check
+
+from .base import Op
+from .internal import Regex
+from .ops import CaseInsensitiveStringLiteral
+from .ops import CompositeOp
+from .ops import Concat
+from .ops import Either
+from .ops import RangeLiteral
+from .ops import Repeat
+from .ops import RuleRef
+from .ops import StringLiteral
+
+
+##
+
+
+def _build_op_regex_pat(op: Op, pats_by_op: ta.Mapping[Op, str | None]) -> str | None:
+    if isinstance(op, StringLiteral):
+        return re.escape(op.value)
+
+    elif isinstance(op, CaseInsensitiveStringLiteral):
+        return f'(?i:{re.escape(op.value)})'
+
+    elif isinstance(op, RangeLiteral):
+        lo = re.escape(op.value.lo)
+        hi = re.escape(op.value.hi)
+        return f'[{lo}-{hi}]'
+
+    elif isinstance(op, RuleRef):
+        return None
+
+    elif isinstance(op, Regex):
+        return op.pat.pattern
+
+    elif isinstance(op, Concat):
+        child_pats = [pats_by_op[child] for child in op.children]
+        if not all(ca is not None for ca in child_pats):
+            return None
+        return ''.join(ta.cast(str, ca) for ca in child_pats)
+
+    elif isinstance(op, Repeat):
+        if (child_pat := pats_by_op[op.child]) is None:
+            return None
+
+        # Wrap the child pattern in a non-capturing group if needed to ensure correct quantification. A pattern needs
+        # wrapping if it contains multiple elements or operators (e.g., 'ab', 'a|b'). Single character classes [a-z] and
+        # single escaped chars don't need wrapping.
+        needs_group = (
+            len(child_pat) > 1 and
+            not (child_pat.startswith('[') and child_pat.endswith(']'))
+        )
+        if needs_group:
+            child_pat = f'(?:{child_pat})'
+
+        times = op.times
+        if times.min == 0 and times.max is None:
+            quantifier = '*'
+        elif times.min == 1 and times.max is None:
+            quantifier = '+'
+        elif times.min == 0 and times.max == 1:
+            quantifier = '?'
+        elif times.max is None:
+            quantifier = f'{{{times.min},}}'
+        elif times.min == times.max:
+            quantifier = f'{{{times.min}}}'
+        else:
+            quantifier = f'{{{times.min},{times.max}}}'
+
+        return child_pat + quantifier
+
+    elif isinstance(op, Either):
+        # Only convert Either if first_match is True, as regex alternation uses first-match semantics. ABNF Either with
+        # first_match=False uses longest-match semantics, which differs from regex.
+        if not op.first_match:
+            return None
+
+        child_pats = [pats_by_op[child] for child in op.children]
+        if not all(ca is not None for ca in child_pats):
+            return None
+
+        # Build regex alternation. Use a capturing group for the alternation
+        return f'({"|".join(ta.cast("ta.Sequence[str]", child_pats))})'
+
+    else:
+        raise TypeError(op)
+
+
+def _regex_transform_single_op(op: Op, pats_by_op: ta.Mapping[Op, str | None]) -> Op:
+    pat = pats_by_op[op]
+
+    if pat is not None:
+        if isinstance(op, Regex):
+            return op
+
+        return Regex(re.compile(pat))
+
+    if isinstance(op, Concat):
+        new_children = tuple(_regex_transform_single_op(child, pats_by_op) for child in op.children)
+        if new_children == op.children:
+            return op
+
+        return Concat(*new_children)
+
+    elif isinstance(op, Repeat):
+        new_child = _regex_transform_single_op(op.child, pats_by_op)
+        if new_child == op.child:
+            return op
+
+        return Repeat(op.times, new_child)
+
+    elif isinstance(op, Either):
+        new_children = tuple(_regex_transform_single_op(child, pats_by_op) for child in op.children)
+        if new_children == op.children:
+            return op
+
+        return Either(*new_children, first_match=op.first_match)
+
+    return op
+
+
+def regex_transform_op(op: Op) -> Op:
+    pats_by_op: dict[Op, str | None] = {}
+
+    def analyze_tree(o: Op) -> None:
+        check.not_in(o, pats_by_op)
+
+        if isinstance(o, CompositeOp):
+            for child in o.children:
+                analyze_tree(child)
+
+        pats_by_op[o] = _build_op_regex_pat(o, pats_by_op)
+
+    analyze_tree(op)
+
+    return _regex_transform_single_op(op, pats_by_op)
+
+
+##
+
+
+def optimize_op(op: Op) -> Op:
+    op = regex_transform_op(op)
+
+    return op
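
As a concrete illustration of the pass, here is a minimal sketch (not part of the diff): it builds an Either of two string literals using constructors whose signatures appear in the ops.py hunks above, then folds it with optimize_op. Per _build_op_regex_pat, a first_match Either of regex-convertible children should collapse into a single internal.Regex op with pattern '(a|b)'; with first_match=False it is left untouched, since regex alternation is first-match while plain ABNF alternation is longest-match. The top-level import path and the behavior of literal() are assumptions:

from omextra.text.abnf import optimize_op
from omextra.text.abnf import ops

# Assumes ops.literal('a') builds a plain StringLiteral op.
op = ops.Either(ops.literal('a'), ops.literal('b'), first_match=True)

opt = optimize_op(op)  # expected: an internal.Regex wrapping re.compile('(a|b)')
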
{omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/omextra/text/abnf/parsing.py

@@ -19,15 +19,21 @@ from .ops import StringLiteral
 
 
 class _Parser:
+    class MaxStepsExceededError(Exception):
+        pass
+
     def __init__(
         self,
         grammar: Grammar,
         source: str,
+        *,
+        max_steps: int | None = None,
     ) -> None:
         super().__init__()
 
         self._grammar = grammar
         self._source = source
+        self._max_steps = max_steps
 
         self._dispatch: dict[type[Op], ta.Any] = {
             StringLiteral: self._iter_parse_string_literal,
@@ -40,6 +46,10 @@ class _Parser:
             Regex: self._iter_parse_regex,
         }
 
+        self._memo: dict[tuple[Op, int], tuple[Match, ...]] = {}
+
+        self._cur_step = 0
+
     def _iter_parse_string_literal(self, op: StringLiteral, start: int) -> ta.Iterator[Match]:
         if start < len(self._source):  # noqa
             source = self._source[start : start + len(op._value)]  # noqa
@@ -62,49 +72,90 @@ class _Parser:
             yield Match(op, start, start + 1, ())
 
     def _iter_parse_concat(self, op: Concat, start: int) -> ta.Iterator[Match]:
-        i = 0
         match_tups: list[tuple[Match, ...]] = [()]
+
+        i = 0
         for cp in op._children:  # noqa
             next_match_tups: list[tuple[Match, ...]] = []
+
             for mt in match_tups:
                 for cm in self.iter_parse(cp, mt[-1].end if mt else start):
                     next_match_tups.append((*mt, cm))
                     i += 1
+
             if not next_match_tups:
                 return
+
             match_tups = next_match_tups
+
         if not i:
             return
+
         for mt in sorted(match_tups, key=len, reverse=True):
             yield Match(op, start, mt[-1].end if mt else start, mt)
 
     def _iter_parse_repeat(self, op: Repeat, start: int) -> ta.Iterator[Match]:
-
-
+        # Map from (repetition_count, end_position) to longest match tuple
+        matches_by_count_pos: dict[tuple[int, int], tuple[Match, ...]] = {(0, start): ()}
+        max_end_by_count: dict[int, int] = {0: start}
+
         i = 0
         while True:
             if op._times.max is not None and i == op._times.max:  # noqa
                 break
-
-
-
-
-
+
+            if self._max_steps is not None and self._cur_step > self._max_steps:
+                raise _Parser.MaxStepsExceededError(self._cur_step)
+            self._cur_step += 1
+
+            next_matches: dict[tuple[int, int], tuple[Match, ...]] = {}
+            next_max_end = max_end_by_count.get(i, -1)
+
+            for (count, end_pos), mt in matches_by_count_pos.items():
+                if count != i:
+                    continue
+
+                for cm in self.iter_parse(op._child, end_pos):  # noqa
+                    next_mt = (*mt, cm)
+                    next_key = (i + 1, cm.end)
+
+                    # Keep only the longest match tuple for each (count, position)
+                    if next_key not in next_matches or len(next_mt) > len(next_matches[next_key]):
+                        next_matches[next_key] = next_mt
+                    if cm.end > next_max_end:
+                        next_max_end = cm.end
+
+            if not next_matches:
+                break
+
+            # Check if we made progress (reached new positions)
+            if next_max_end <= max_end_by_count.get(i, -1):
                 break
+
             i += 1
-
-
+            matches_by_count_pos.update(next_matches)
+            max_end_by_count[i] = next_max_end
+
         if i < op._times.min:  # noqa
             return
-
-
+
+        # Collect valid matches and sort by (end_position, repetition_count) descending
+        valid_matches: list[tuple[int, int, tuple[Match, ...]]] = []
+        for (count, end_pos), mt in matches_by_count_pos.items():
+            if op._times.min <= count <= (op._times.max if op._times.max is not None else i):  # noqa
+                valid_matches.append((end_pos, count, mt))
+
+        for end_pos, _, mt in sorted(valid_matches, key=lambda x: (x[0], x[1]), reverse=True):
+            yield Match(op, start, end_pos, mt)
 
     def _iter_parse_either(self, op: Either, start: int) -> ta.Iterator[Match]:
         for cp in op._children:  # noqa
             found = False
+
             for cm in self.iter_parse(cp, start):
                 found = True
                 yield Match(op, start, cm.end, (cm,))
+
             if found and op._first_match:  # noqa
                 return
 
@@ -114,10 +165,21 @@ class _Parser:
             yield Match(op, cm.start, cm.end, (cm,))
 
     def _iter_parse_regex(self, op: Regex, start: int) -> ta.Iterator[Match]:
-
+        if (m := op._pat.match(self._source, start)) is not None:  # noqa
+            yield Match(op, start, m.end(), ())
 
     def iter_parse(self, op: Op, start: int) -> ta.Iterator[Match]:
-
+        if (key := (op, start)) in self._memo:
+            yield from self._memo[key]
+            return
+
+        if self._max_steps is not None and self._cur_step >= self._max_steps:
+            raise _Parser.MaxStepsExceededError(self._cur_step)
+        self._cur_step += 1
+
+        matches = tuple(self._dispatch[op.__class__](op, start))
+        self._memo[key] = matches
+        yield from matches
 
 
 ##
@@ -131,8 +193,9 @@ class _DebugParser(_Parser):
         level: int = 1,
         *,
         write: ta.Callable[[str], None] | None = None,
+        **kwargs: ta.Any,
     ) -> None:
-        super().__init__(grammar, source)
+        super().__init__(grammar, source, **kwargs)
 
         self._level = level
         if write is None:
@@ -195,18 +258,21 @@ def _iter_parse(
     start: int,
     *,
     debug: int = 0,
+    max_steps: int | None = None,
 ) -> ta.Iterator[Match]:
     parser: _Parser
     if debug:
         parser = _DebugParser(
             grammar,
             source,
+            max_steps=max_steps,
             level=debug,
         )
     else:
         parser = _Parser(
             grammar,
             source,
+            max_steps=max_steps,
         )
 
     return parser.iter_parse(op, start)
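
Two behavioral changes ride along with the refactor above: results are memoized per (op, start) pair, and an optional max_steps budget bounds the work done before the parser raises MaxStepsExceededError. The budget is threaded through Grammar.parse/iter_parse via the **kwargs added in the base.py hunks. A hedged usage sketch; the import path, the grammar text, and the assumption that Grammar.parse defaults the root to the grammar's own root are not taken verbatim from this diff:

from omextra.text.abnf import parse_grammar

g = parse_grammar('num = 1*DIGIT\r\n', root='num')  # hypothetical grammar

try:
    m = g.parse('123456', complete=True, max_steps=10_000)
except Exception as e:  # the parser's MaxStepsExceededError is an internal class
    print('parse aborted after too many steps:', e)
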
{omextra-0.0.0.dev495 → omextra-0.0.0.dev497/omextra.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omextra
-Version: 0.0.0.dev495
+Version: 0.0.0.dev497
 Summary: omextra
 Author: wrmsr
 License-Expression: BSD-3-Clause
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: omlish==0.0.0.dev495
+Requires-Dist: omlish==0.0.0.dev497
 Dynamic: license-file
 
 # Overview

omextra-0.0.0.dev497/omextra.egg-info/requires.txt

@@ -0,0 +1 @@
+omlish==0.0.0.dev497

{omextra-0.0.0.dev495 → omextra-0.0.0.dev497}/pyproject.toml

@@ -13,7 +13,7 @@ urls = {source = 'https://github.com/wrmsr/omlish'}
 license = 'BSD-3-Clause'
 readme = 'README.md'
 requires-python = '>=3.13'
-version = '0.0.0.dev495'
+version = '0.0.0.dev497'
 classifiers = [
     'Development Status :: 2 - Pre-Alpha',
     'Intended Audience :: Developers',
@@ -24,7 +24,7 @@ classifiers = [
 ]
 description = 'omextra'
 dependencies = [
-    'omlish == 0.0.0.dev495',
+    'omlish == 0.0.0.dev497',
 ]
 
 [project.optional-dependencies]

omextra-0.0.0.dev495/omextra.egg-info/requires.txt

@@ -1 +0,0 @@
-omlish==0.0.0.dev495