gitlab-pygments.rb 0.3.2
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +6 -0
- data/Gemfile +2 -0
- data/README.md +91 -0
- data/Rakefile +78 -0
- data/bench.rb +22 -0
- data/cache-lexers.rb +8 -0
- data/lexers +0 -0
- data/lib/pygments/lexer.rb +148 -0
- data/lib/pygments/mentos.py +344 -0
- data/lib/pygments/popen.rb +389 -0
- data/lib/pygments/version.rb +3 -0
- data/lib/pygments.rb +8 -0
- data/pygments.rb.gemspec +24 -0
- data/test/test_data.c +2581 -0
- data/test/test_data.py +514 -0
- data/test/test_data_generated +2582 -0
- data/test/test_pygments.rb +276 -0
- data/vendor/custom_formatters/gitlab.py +171 -0
- data/vendor/custom_lexers/github.py +362 -0
- data/vendor/pygments-main/AUTHORS +115 -0
- data/vendor/pygments-main/CHANGES +762 -0
- data/vendor/pygments-main/LICENSE +25 -0
- data/vendor/pygments-main/MANIFEST.in +6 -0
- data/vendor/pygments-main/Makefile +59 -0
- data/vendor/pygments-main/REVISION +1 -0
- data/vendor/pygments-main/TODO +15 -0
- data/vendor/pygments-main/docs/generate.py +472 -0
- data/vendor/pygments-main/docs/pygmentize.1 +94 -0
- data/vendor/pygments-main/docs/src/api.txt +270 -0
- data/vendor/pygments-main/docs/src/authors.txt +5 -0
- data/vendor/pygments-main/docs/src/changelog.txt +5 -0
- data/vendor/pygments-main/docs/src/cmdline.txt +147 -0
- data/vendor/pygments-main/docs/src/filterdevelopment.txt +70 -0
- data/vendor/pygments-main/docs/src/filters.txt +42 -0
- data/vendor/pygments-main/docs/src/formatterdevelopment.txt +169 -0
- data/vendor/pygments-main/docs/src/formatters.txt +48 -0
- data/vendor/pygments-main/docs/src/index.txt +69 -0
- data/vendor/pygments-main/docs/src/installation.txt +71 -0
- data/vendor/pygments-main/docs/src/integrate.txt +43 -0
- data/vendor/pygments-main/docs/src/lexerdevelopment.txt +551 -0
- data/vendor/pygments-main/docs/src/lexers.txt +67 -0
- data/vendor/pygments-main/docs/src/moinmoin.txt +39 -0
- data/vendor/pygments-main/docs/src/plugins.txt +93 -0
- data/vendor/pygments-main/docs/src/quickstart.txt +202 -0
- data/vendor/pygments-main/docs/src/rstdirective.txt +22 -0
- data/vendor/pygments-main/docs/src/styles.txt +143 -0
- data/vendor/pygments-main/docs/src/tokens.txt +349 -0
- data/vendor/pygments-main/docs/src/unicode.txt +49 -0
- data/vendor/pygments-main/external/markdown-processor.py +67 -0
- data/vendor/pygments-main/external/moin-parser.py +112 -0
- data/vendor/pygments-main/external/pygments.bashcomp +38 -0
- data/vendor/pygments-main/external/rst-directive-old.py +77 -0
- data/vendor/pygments-main/external/rst-directive.py +83 -0
- data/vendor/pygments-main/ez_setup.py +276 -0
- data/vendor/pygments-main/pygmentize +7 -0
- data/vendor/pygments-main/pygments/__init__.py +91 -0
- data/vendor/pygments-main/pygments/cmdline.py +433 -0
- data/vendor/pygments-main/pygments/console.py +74 -0
- data/vendor/pygments-main/pygments/filter.py +74 -0
- data/vendor/pygments-main/pygments/filters/__init__.py +357 -0
- data/vendor/pygments-main/pygments/formatter.py +92 -0
- data/vendor/pygments-main/pygments/formatters/__init__.py +68 -0
- data/vendor/pygments-main/pygments/formatters/_mapping.py +94 -0
- data/vendor/pygments-main/pygments/formatters/bbcode.py +109 -0
- data/vendor/pygments-main/pygments/formatters/gitlab.py +171 -0
- data/vendor/pygments-main/pygments/formatters/html.py +750 -0
- data/vendor/pygments-main/pygments/formatters/img.py +553 -0
- data/vendor/pygments-main/pygments/formatters/latex.py +378 -0
- data/vendor/pygments-main/pygments/formatters/other.py +117 -0
- data/vendor/pygments-main/pygments/formatters/rtf.py +136 -0
- data/vendor/pygments-main/pygments/formatters/svg.py +154 -0
- data/vendor/pygments-main/pygments/formatters/terminal.py +112 -0
- data/vendor/pygments-main/pygments/formatters/terminal256.py +222 -0
- data/vendor/pygments-main/pygments/lexer.py +697 -0
- data/vendor/pygments-main/pygments/lexers/__init__.py +229 -0
- data/vendor/pygments-main/pygments/lexers/_asybuiltins.py +1645 -0
- data/vendor/pygments-main/pygments/lexers/_clbuiltins.py +232 -0
- data/vendor/pygments-main/pygments/lexers/_luabuiltins.py +249 -0
- data/vendor/pygments-main/pygments/lexers/_mapping.py +298 -0
- data/vendor/pygments-main/pygments/lexers/_phpbuiltins.py +3787 -0
- data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +232 -0
- data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +29 -0
- data/vendor/pygments-main/pygments/lexers/_vimbuiltins.py +3 -0
- data/vendor/pygments-main/pygments/lexers/agile.py +1803 -0
- data/vendor/pygments-main/pygments/lexers/asm.py +360 -0
- data/vendor/pygments-main/pygments/lexers/compiled.py +2891 -0
- data/vendor/pygments-main/pygments/lexers/dotnet.py +636 -0
- data/vendor/pygments-main/pygments/lexers/functional.py +1832 -0
- data/vendor/pygments-main/pygments/lexers/github.py +362 -0
- data/vendor/pygments-main/pygments/lexers/hdl.py +356 -0
- data/vendor/pygments-main/pygments/lexers/jvm.py +847 -0
- data/vendor/pygments-main/pygments/lexers/math.py +1072 -0
- data/vendor/pygments-main/pygments/lexers/other.py +3339 -0
- data/vendor/pygments-main/pygments/lexers/parsers.py +695 -0
- data/vendor/pygments-main/pygments/lexers/shell.py +361 -0
- data/vendor/pygments-main/pygments/lexers/special.py +100 -0
- data/vendor/pygments-main/pygments/lexers/sql.py +559 -0
- data/vendor/pygments-main/pygments/lexers/templates.py +1631 -0
- data/vendor/pygments-main/pygments/lexers/text.py +1753 -0
- data/vendor/pygments-main/pygments/lexers/web.py +2864 -0
- data/vendor/pygments-main/pygments/plugin.py +74 -0
- data/vendor/pygments-main/pygments/scanner.py +104 -0
- data/vendor/pygments-main/pygments/style.py +117 -0
- data/vendor/pygments-main/pygments/styles/__init__.py +70 -0
- data/vendor/pygments-main/pygments/styles/autumn.py +65 -0
- data/vendor/pygments-main/pygments/styles/borland.py +51 -0
- data/vendor/pygments-main/pygments/styles/bw.py +49 -0
- data/vendor/pygments-main/pygments/styles/colorful.py +81 -0
- data/vendor/pygments-main/pygments/styles/default.py +73 -0
- data/vendor/pygments-main/pygments/styles/emacs.py +72 -0
- data/vendor/pygments-main/pygments/styles/friendly.py +72 -0
- data/vendor/pygments-main/pygments/styles/fruity.py +42 -0
- data/vendor/pygments-main/pygments/styles/manni.py +75 -0
- data/vendor/pygments-main/pygments/styles/monokai.py +106 -0
- data/vendor/pygments-main/pygments/styles/murphy.py +80 -0
- data/vendor/pygments-main/pygments/styles/native.py +65 -0
- data/vendor/pygments-main/pygments/styles/pastie.py +75 -0
- data/vendor/pygments-main/pygments/styles/perldoc.py +69 -0
- data/vendor/pygments-main/pygments/styles/rrt.py +33 -0
- data/vendor/pygments-main/pygments/styles/tango.py +141 -0
- data/vendor/pygments-main/pygments/styles/trac.py +63 -0
- data/vendor/pygments-main/pygments/styles/vim.py +63 -0
- data/vendor/pygments-main/pygments/styles/vs.py +38 -0
- data/vendor/pygments-main/pygments/token.py +195 -0
- data/vendor/pygments-main/pygments/unistring.py +130 -0
- data/vendor/pygments-main/pygments/util.py +232 -0
- data/vendor/pygments-main/scripts/check_sources.py +242 -0
- data/vendor/pygments-main/scripts/detect_missing_analyse_text.py +30 -0
- data/vendor/pygments-main/scripts/epydoc.css +280 -0
- data/vendor/pygments-main/scripts/find_codetags.py +205 -0
- data/vendor/pygments-main/scripts/find_error.py +171 -0
- data/vendor/pygments-main/scripts/get_vimkw.py +43 -0
- data/vendor/pygments-main/scripts/pylintrc +301 -0
- data/vendor/pygments-main/scripts/reindent.py +291 -0
- data/vendor/pygments-main/scripts/vim2pygments.py +933 -0
- data/vendor/pygments-main/setup.cfg +6 -0
- data/vendor/pygments-main/setup.py +88 -0
- data/vendor/pygments-main/tests/dtds/HTML4-f.dtd +37 -0
- data/vendor/pygments-main/tests/dtds/HTML4-s.dtd +869 -0
- data/vendor/pygments-main/tests/dtds/HTML4.dcl +88 -0
- data/vendor/pygments-main/tests/dtds/HTML4.dtd +1092 -0
- data/vendor/pygments-main/tests/dtds/HTML4.soc +9 -0
- data/vendor/pygments-main/tests/dtds/HTMLlat1.ent +195 -0
- data/vendor/pygments-main/tests/dtds/HTMLspec.ent +77 -0
- data/vendor/pygments-main/tests/dtds/HTMLsym.ent +241 -0
- data/vendor/pygments-main/tests/examplefiles/ANTLRv3.g +608 -0
- data/vendor/pygments-main/tests/examplefiles/AcidStateAdvanced.hs +209 -0
- data/vendor/pygments-main/tests/examplefiles/AlternatingGroup.mu +102 -0
- data/vendor/pygments-main/tests/examplefiles/CPDictionary.j +611 -0
- data/vendor/pygments-main/tests/examplefiles/Constants.mo +158 -0
- data/vendor/pygments-main/tests/examplefiles/DancingSudoku.lhs +411 -0
- data/vendor/pygments-main/tests/examplefiles/Errors.scala +18 -0
- data/vendor/pygments-main/tests/examplefiles/File.hy +174 -0
- data/vendor/pygments-main/tests/examplefiles/Intro.java +1660 -0
- data/vendor/pygments-main/tests/examplefiles/Makefile +1131 -0
- data/vendor/pygments-main/tests/examplefiles/Object.st +4394 -0
- data/vendor/pygments-main/tests/examplefiles/OrderedMap.hx +584 -0
- data/vendor/pygments-main/tests/examplefiles/SmallCheck.hs +378 -0
- data/vendor/pygments-main/tests/examplefiles/Sorting.mod +470 -0
- data/vendor/pygments-main/tests/examplefiles/Sudoku.lhs +382 -0
- data/vendor/pygments-main/tests/examplefiles/addressbook.proto +30 -0
- data/vendor/pygments-main/tests/examplefiles/antlr_throws +1 -0
- data/vendor/pygments-main/tests/examplefiles/apache2.conf +393 -0
- data/vendor/pygments-main/tests/examplefiles/as3_test.as +143 -0
- data/vendor/pygments-main/tests/examplefiles/as3_test2.as +46 -0
- data/vendor/pygments-main/tests/examplefiles/as3_test3.as +3 -0
- data/vendor/pygments-main/tests/examplefiles/aspx-cs_example +27 -0
- data/vendor/pygments-main/tests/examplefiles/badcase.java +2 -0
- data/vendor/pygments-main/tests/examplefiles/batchfile.bat +49 -0
- data/vendor/pygments-main/tests/examplefiles/boot-9.scm +1557 -0
- data/vendor/pygments-main/tests/examplefiles/cells.ps +515 -0
- data/vendor/pygments-main/tests/examplefiles/ceval.c +2604 -0
- data/vendor/pygments-main/tests/examplefiles/cheetah_example.html +13 -0
- data/vendor/pygments-main/tests/examplefiles/classes.dylan +40 -0
- data/vendor/pygments-main/tests/examplefiles/condensed_ruby.rb +10 -0
- data/vendor/pygments-main/tests/examplefiles/coq_RelationClasses +447 -0
- data/vendor/pygments-main/tests/examplefiles/database.pytb +20 -0
- data/vendor/pygments-main/tests/examplefiles/de.MoinMoin.po +2461 -0
- data/vendor/pygments-main/tests/examplefiles/demo.ahk +181 -0
- data/vendor/pygments-main/tests/examplefiles/demo.cfm +38 -0
- data/vendor/pygments-main/tests/examplefiles/django_sample.html+django +68 -0
- data/vendor/pygments-main/tests/examplefiles/dwarf.cw +17 -0
- data/vendor/pygments-main/tests/examplefiles/erl_session +10 -0
- data/vendor/pygments-main/tests/examplefiles/escape_semicolon.clj +1 -0
- data/vendor/pygments-main/tests/examplefiles/evil_regex.js +48 -0
- data/vendor/pygments-main/tests/examplefiles/example.c +2080 -0
- data/vendor/pygments-main/tests/examplefiles/example.cls +15 -0
- data/vendor/pygments-main/tests/examplefiles/example.cpp +2363 -0
- data/vendor/pygments-main/tests/examplefiles/example.gs +106 -0
- data/vendor/pygments-main/tests/examplefiles/example.gst +7 -0
- data/vendor/pygments-main/tests/examplefiles/example.kt +47 -0
- data/vendor/pygments-main/tests/examplefiles/example.lua +250 -0
- data/vendor/pygments-main/tests/examplefiles/example.moo +26 -0
- data/vendor/pygments-main/tests/examplefiles/example.moon +629 -0
- data/vendor/pygments-main/tests/examplefiles/example.nim +1010 -0
- data/vendor/pygments-main/tests/examplefiles/example.ns2 +69 -0
- data/vendor/pygments-main/tests/examplefiles/example.p +34 -0
- data/vendor/pygments-main/tests/examplefiles/example.pas +2708 -0
- data/vendor/pygments-main/tests/examplefiles/example.rb +1852 -0
- data/vendor/pygments-main/tests/examplefiles/example.rhtml +561 -0
- data/vendor/pygments-main/tests/examplefiles/example.sh-session +19 -0
- data/vendor/pygments-main/tests/examplefiles/example.sml +156 -0
- data/vendor/pygments-main/tests/examplefiles/example.snobol +15 -0
- data/vendor/pygments-main/tests/examplefiles/example.tea +34 -0
- data/vendor/pygments-main/tests/examplefiles/example.u +548 -0
- data/vendor/pygments-main/tests/examplefiles/example.weechatlog +9 -0
- data/vendor/pygments-main/tests/examplefiles/example.xhtml +376 -0
- data/vendor/pygments-main/tests/examplefiles/example.yaml +302 -0
- data/vendor/pygments-main/tests/examplefiles/example2.aspx +29 -0
- data/vendor/pygments-main/tests/examplefiles/example_elixir.ex +363 -0
- data/vendor/pygments-main/tests/examplefiles/example_file.fy +128 -0
- data/vendor/pygments-main/tests/examplefiles/firefox.mak +586 -0
- data/vendor/pygments-main/tests/examplefiles/flipflop.sv +19 -0
- data/vendor/pygments-main/tests/examplefiles/foo.sce +6 -0
- data/vendor/pygments-main/tests/examplefiles/format.ml +1213 -0
- data/vendor/pygments-main/tests/examplefiles/fucked_up.rb +77 -0
- data/vendor/pygments-main/tests/examplefiles/function.mu +1 -0
- data/vendor/pygments-main/tests/examplefiles/functional.rst +1472 -0
- data/vendor/pygments-main/tests/examplefiles/genclass.clj +510 -0
- data/vendor/pygments-main/tests/examplefiles/genshi_example.xml+genshi +193 -0
- data/vendor/pygments-main/tests/examplefiles/genshitext_example.genshitext +33 -0
- data/vendor/pygments-main/tests/examplefiles/glsl.frag +7 -0
- data/vendor/pygments-main/tests/examplefiles/glsl.vert +13 -0
- data/vendor/pygments-main/tests/examplefiles/html+php_faulty.php +1 -0
- data/vendor/pygments-main/tests/examplefiles/http_request_example +14 -0
- data/vendor/pygments-main/tests/examplefiles/http_response_example +27 -0
- data/vendor/pygments-main/tests/examplefiles/import.hs +4 -0
- data/vendor/pygments-main/tests/examplefiles/intro.ik +24 -0
- data/vendor/pygments-main/tests/examplefiles/ints.php +10 -0
- data/vendor/pygments-main/tests/examplefiles/intsyn.fun +675 -0
- data/vendor/pygments-main/tests/examplefiles/intsyn.sig +286 -0
- data/vendor/pygments-main/tests/examplefiles/irb_heredoc +8 -0
- data/vendor/pygments-main/tests/examplefiles/irc.lsp +214 -0
- data/vendor/pygments-main/tests/examplefiles/java.properties +16 -0
- data/vendor/pygments-main/tests/examplefiles/jbst_example1.jbst +28 -0
- data/vendor/pygments-main/tests/examplefiles/jbst_example2.jbst +45 -0
- data/vendor/pygments-main/tests/examplefiles/jinjadesignerdoc.rst +713 -0
- data/vendor/pygments-main/tests/examplefiles/lighttpd_config.conf +13 -0
- data/vendor/pygments-main/tests/examplefiles/linecontinuation.py +47 -0
- data/vendor/pygments-main/tests/examplefiles/ltmain.sh +2849 -0
- data/vendor/pygments-main/tests/examplefiles/main.cmake +42 -0
- data/vendor/pygments-main/tests/examplefiles/markdown.lsp +679 -0
- data/vendor/pygments-main/tests/examplefiles/matlab_noreturn +3 -0
- data/vendor/pygments-main/tests/examplefiles/matlab_sample +27 -0
- data/vendor/pygments-main/tests/examplefiles/matlabsession_sample.txt +37 -0
- data/vendor/pygments-main/tests/examplefiles/minimal.ns2 +4 -0
- data/vendor/pygments-main/tests/examplefiles/moin_SyntaxReference.txt +340 -0
- data/vendor/pygments-main/tests/examplefiles/multiline_regexes.rb +38 -0
- data/vendor/pygments-main/tests/examplefiles/nasm_aoutso.asm +96 -0
- data/vendor/pygments-main/tests/examplefiles/nasm_objexe.asm +30 -0
- data/vendor/pygments-main/tests/examplefiles/nemerle_sample.n +87 -0
- data/vendor/pygments-main/tests/examplefiles/nginx_nginx.conf +118 -0
- data/vendor/pygments-main/tests/examplefiles/numbers.c +12 -0
- data/vendor/pygments-main/tests/examplefiles/objc_example.m +25 -0
- data/vendor/pygments-main/tests/examplefiles/objc_example2.m +24 -0
- data/vendor/pygments-main/tests/examplefiles/perl_misc +62 -0
- data/vendor/pygments-main/tests/examplefiles/perl_perl5db +998 -0
- data/vendor/pygments-main/tests/examplefiles/perl_regex-delims +120 -0
- data/vendor/pygments-main/tests/examplefiles/perlfunc.1 +856 -0
- data/vendor/pygments-main/tests/examplefiles/phpcomplete.vim +567 -0
- data/vendor/pygments-main/tests/examplefiles/pleac.in.rb +1223 -0
- data/vendor/pygments-main/tests/examplefiles/postgresql_test.txt +47 -0
- data/vendor/pygments-main/tests/examplefiles/pppoe.applescript +10 -0
- data/vendor/pygments-main/tests/examplefiles/psql_session.txt +122 -0
- data/vendor/pygments-main/tests/examplefiles/py3_test.txt +2 -0
- data/vendor/pygments-main/tests/examplefiles/pycon_test.pycon +14 -0
- data/vendor/pygments-main/tests/examplefiles/pytb_test2.pytb +2 -0
- data/vendor/pygments-main/tests/examplefiles/python25-bsd.mak +234 -0
- data/vendor/pygments-main/tests/examplefiles/qsort.prolog +13 -0
- data/vendor/pygments-main/tests/examplefiles/r-console-transcript.Rout +38 -0
- data/vendor/pygments-main/tests/examplefiles/ragel-cpp_rlscan +280 -0
- data/vendor/pygments-main/tests/examplefiles/ragel-cpp_snippet +2 -0
- data/vendor/pygments-main/tests/examplefiles/regex.js +22 -0
- data/vendor/pygments-main/tests/examplefiles/reversi.lsp +427 -0
- data/vendor/pygments-main/tests/examplefiles/ruby_func_def.rb +11 -0
- data/vendor/pygments-main/tests/examplefiles/scilab.sci +30 -0
- data/vendor/pygments-main/tests/examplefiles/sibling.prolog +19 -0
- data/vendor/pygments-main/tests/examplefiles/simple.md +747 -0
- data/vendor/pygments-main/tests/examplefiles/smarty_example.html +209 -0
- data/vendor/pygments-main/tests/examplefiles/source.lgt +343 -0
- data/vendor/pygments-main/tests/examplefiles/sources.list +62 -0
- data/vendor/pygments-main/tests/examplefiles/sphere.pov +18 -0
- data/vendor/pygments-main/tests/examplefiles/sqlite3.sqlite3-console +27 -0
- data/vendor/pygments-main/tests/examplefiles/squid.conf +30 -0
- data/vendor/pygments-main/tests/examplefiles/string.jl +1031 -0
- data/vendor/pygments-main/tests/examplefiles/string_delimiters.d +21 -0
- data/vendor/pygments-main/tests/examplefiles/stripheredoc.sh +3 -0
- data/vendor/pygments-main/tests/examplefiles/test.R +119 -0
- data/vendor/pygments-main/tests/examplefiles/test.adb +211 -0
- data/vendor/pygments-main/tests/examplefiles/test.asy +131 -0
- data/vendor/pygments-main/tests/examplefiles/test.awk +121 -0
- data/vendor/pygments-main/tests/examplefiles/test.bas +29 -0
- data/vendor/pygments-main/tests/examplefiles/test.bmx +145 -0
- data/vendor/pygments-main/tests/examplefiles/test.boo +39 -0
- data/vendor/pygments-main/tests/examplefiles/test.bro +250 -0
- data/vendor/pygments-main/tests/examplefiles/test.cs +374 -0
- data/vendor/pygments-main/tests/examplefiles/test.css +54 -0
- data/vendor/pygments-main/tests/examplefiles/test.d +135 -0
- data/vendor/pygments-main/tests/examplefiles/test.dart +23 -0
- data/vendor/pygments-main/tests/examplefiles/test.dtd +89 -0
- data/vendor/pygments-main/tests/examplefiles/test.ec +605 -0
- data/vendor/pygments-main/tests/examplefiles/test.ecl +58 -0
- data/vendor/pygments-main/tests/examplefiles/test.eh +315 -0
- data/vendor/pygments-main/tests/examplefiles/test.erl +169 -0
- data/vendor/pygments-main/tests/examplefiles/test.evoque +33 -0
- data/vendor/pygments-main/tests/examplefiles/test.fan +818 -0
- data/vendor/pygments-main/tests/examplefiles/test.flx +57 -0
- data/vendor/pygments-main/tests/examplefiles/test.gdc +13 -0
- data/vendor/pygments-main/tests/examplefiles/test.groovy +97 -0
- data/vendor/pygments-main/tests/examplefiles/test.html +339 -0
- data/vendor/pygments-main/tests/examplefiles/test.ini +10 -0
- data/vendor/pygments-main/tests/examplefiles/test.java +653 -0
- data/vendor/pygments-main/tests/examplefiles/test.jsp +24 -0
- data/vendor/pygments-main/tests/examplefiles/test.maql +45 -0
- data/vendor/pygments-main/tests/examplefiles/test.mod +374 -0
- data/vendor/pygments-main/tests/examplefiles/test.moo +51 -0
- data/vendor/pygments-main/tests/examplefiles/test.myt +166 -0
- data/vendor/pygments-main/tests/examplefiles/test.nim +93 -0
- data/vendor/pygments-main/tests/examplefiles/test.pas +743 -0
- data/vendor/pygments-main/tests/examplefiles/test.php +505 -0
- data/vendor/pygments-main/tests/examplefiles/test.plot +333 -0
- data/vendor/pygments-main/tests/examplefiles/test.ps1 +108 -0
- data/vendor/pygments-main/tests/examplefiles/test.pypylog +1839 -0
- data/vendor/pygments-main/tests/examplefiles/test.r3 +94 -0
- data/vendor/pygments-main/tests/examplefiles/test.rb +177 -0
- data/vendor/pygments-main/tests/examplefiles/test.rhtml +43 -0
- data/vendor/pygments-main/tests/examplefiles/test.scaml +8 -0
- data/vendor/pygments-main/tests/examplefiles/test.ssp +12 -0
- data/vendor/pygments-main/tests/examplefiles/test.tcsh +830 -0
- data/vendor/pygments-main/tests/examplefiles/test.vb +407 -0
- data/vendor/pygments-main/tests/examplefiles/test.vhdl +161 -0
- data/vendor/pygments-main/tests/examplefiles/test.xqy +138 -0
- data/vendor/pygments-main/tests/examplefiles/test.xsl +23 -0
- data/vendor/pygments-main/tests/examplefiles/truncated.pytb +15 -0
- data/vendor/pygments-main/tests/examplefiles/type.lisp +1202 -0
- data/vendor/pygments-main/tests/examplefiles/underscore.coffee +603 -0
- data/vendor/pygments-main/tests/examplefiles/unicode.applescript +5 -0
- data/vendor/pygments-main/tests/examplefiles/unicodedoc.py +11 -0
- data/vendor/pygments-main/tests/examplefiles/webkit-transition.css +3 -0
- data/vendor/pygments-main/tests/examplefiles/while.pov +13 -0
- data/vendor/pygments-main/tests/examplefiles/wiki.factor +384 -0
- data/vendor/pygments-main/tests/examplefiles/xml_example +1897 -0
- data/vendor/pygments-main/tests/examplefiles/zmlrpc.f90 +798 -0
- data/vendor/pygments-main/tests/old_run.py +138 -0
- data/vendor/pygments-main/tests/run.py +48 -0
- data/vendor/pygments-main/tests/support.py +15 -0
- data/vendor/pygments-main/tests/test_basic_api.py +294 -0
- data/vendor/pygments-main/tests/test_clexer.py +31 -0
- data/vendor/pygments-main/tests/test_cmdline.py +105 -0
- data/vendor/pygments-main/tests/test_examplefiles.py +97 -0
- data/vendor/pygments-main/tests/test_html_formatter.py +162 -0
- data/vendor/pygments-main/tests/test_latex_formatter.py +55 -0
- data/vendor/pygments-main/tests/test_perllexer.py +137 -0
- data/vendor/pygments-main/tests/test_regexlexer.py +47 -0
- data/vendor/pygments-main/tests/test_token.py +46 -0
- data/vendor/pygments-main/tests/test_using_api.py +40 -0
- data/vendor/pygments-main/tests/test_util.py +116 -0
- data/vendor/simplejson/.gitignore +10 -0
- data/vendor/simplejson/.travis.yml +5 -0
- data/vendor/simplejson/CHANGES.txt +291 -0
- data/vendor/simplejson/LICENSE.txt +19 -0
- data/vendor/simplejson/MANIFEST.in +5 -0
- data/vendor/simplejson/README.rst +19 -0
- data/vendor/simplejson/conf.py +179 -0
- data/vendor/simplejson/index.rst +628 -0
- data/vendor/simplejson/scripts/make_docs.py +18 -0
- data/vendor/simplejson/setup.py +104 -0
- data/vendor/simplejson/simplejson/__init__.py +510 -0
- data/vendor/simplejson/simplejson/_speedups.c +2745 -0
- data/vendor/simplejson/simplejson/decoder.py +425 -0
- data/vendor/simplejson/simplejson/encoder.py +567 -0
- data/vendor/simplejson/simplejson/ordered_dict.py +119 -0
- data/vendor/simplejson/simplejson/scanner.py +77 -0
- data/vendor/simplejson/simplejson/tests/__init__.py +67 -0
- data/vendor/simplejson/simplejson/tests/test_bigint_as_string.py +55 -0
- data/vendor/simplejson/simplejson/tests/test_check_circular.py +30 -0
- data/vendor/simplejson/simplejson/tests/test_decimal.py +66 -0
- data/vendor/simplejson/simplejson/tests/test_decode.py +83 -0
- data/vendor/simplejson/simplejson/tests/test_default.py +9 -0
- data/vendor/simplejson/simplejson/tests/test_dump.py +67 -0
- data/vendor/simplejson/simplejson/tests/test_encode_basestring_ascii.py +46 -0
- data/vendor/simplejson/simplejson/tests/test_encode_for_html.py +32 -0
- data/vendor/simplejson/simplejson/tests/test_errors.py +34 -0
- data/vendor/simplejson/simplejson/tests/test_fail.py +91 -0
- data/vendor/simplejson/simplejson/tests/test_float.py +19 -0
- data/vendor/simplejson/simplejson/tests/test_indent.py +86 -0
- data/vendor/simplejson/simplejson/tests/test_item_sort_key.py +20 -0
- data/vendor/simplejson/simplejson/tests/test_namedtuple.py +121 -0
- data/vendor/simplejson/simplejson/tests/test_pass1.py +76 -0
- data/vendor/simplejson/simplejson/tests/test_pass2.py +14 -0
- data/vendor/simplejson/simplejson/tests/test_pass3.py +20 -0
- data/vendor/simplejson/simplejson/tests/test_recursion.py +67 -0
- data/vendor/simplejson/simplejson/tests/test_scanstring.py +117 -0
- data/vendor/simplejson/simplejson/tests/test_separators.py +42 -0
- data/vendor/simplejson/simplejson/tests/test_speedups.py +20 -0
- data/vendor/simplejson/simplejson/tests/test_tuple.py +49 -0
- data/vendor/simplejson/simplejson/tests/test_unicode.py +109 -0
- data/vendor/simplejson/simplejson/tool.py +39 -0
- metadata +492 -0
@@ -0,0 +1,697 @@
|
|
1
|
+
# -*- coding: utf-8 -*-
|
2
|
+
"""
|
3
|
+
pygments.lexer
|
4
|
+
~~~~~~~~~~~~~~
|
5
|
+
|
6
|
+
Base lexer classes.
|
7
|
+
|
8
|
+
:copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
|
9
|
+
:license: BSD, see LICENSE for details.
|
10
|
+
"""
|
11
|
+
import re
|
12
|
+
|
13
|
+
from pygments.filter import apply_filters, Filter
|
14
|
+
from pygments.filters import get_filter_by_name
|
15
|
+
from pygments.token import Error, Text, Other, _TokenType
|
16
|
+
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
|
17
|
+
make_analysator
|
18
|
+
|
19
|
+
|
20
|
+
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
|
21
|
+
'LexerContext', 'include', 'bygroups', 'using', 'this']
|
22
|
+
|
23
|
+
|
24
|
+
_encoding_map = [('\xef\xbb\xbf', 'utf-8'),
|
25
|
+
('\xff\xfe\0\0', 'utf-32'),
|
26
|
+
('\0\0\xfe\xff', 'utf-32be'),
|
27
|
+
('\xff\xfe', 'utf-16'),
|
28
|
+
('\xfe\xff', 'utf-16be')]
|
29
|
+
|
30
|
+
_default_analyse = staticmethod(lambda x: 0.0)
|
31
|
+
|
32
|
+
|
33
|
+
class LexerMeta(type):
    """
    Metaclass for lexer classes.

    If the class namespace defines an ``analyse_text`` method, it is
    wrapped with ``make_analysator`` so that it behaves like a static
    method and always returns a float.
    """

    def __new__(cls, name, bases, d):
        # Wrap a class-supplied analyser before the class object is built,
        # so every lexer exposes a uniform static, float-returning hook.
        if 'analyse_text' in d:
            d['analyse_text'] = make_analysator(d['analyse_text'])
        return super(LexerMeta, cls).__new__(cls, name, bases, d)
|
43
|
+
|
44
|
+
|
45
|
+
class Lexer(object):
    """
    Lexer for a specific language.

    Basic options recognized:
    ``stripnl``
        Strip leading and trailing newlines from the input (default: True).
    ``stripall``
        Strip all leading and trailing whitespace from the input
        (default: False).
    ``ensurenl``
        Make sure that the input ends with a newline (default: True). This
        is required for some lexers that consume input linewise.
        *New in Pygments 1.3.*
    ``tabsize``
        If given and greater than 0, expand tabs in the input (default: 0).
    ``encoding``
        If given, must be an encoding name. This encoding will be used to
        convert the input string to Unicode, if it is not already a Unicode
        string (default: ``'latin1'``).
        Can also be ``'guess'`` to use a simple UTF-8 / Latin1 detection, or
        ``'chardet'`` to use the chardet library, if it is installed.
    """

    #: Name of the lexer
    name = None

    #: Shortcuts for the lexer
    aliases = []

    #: fn match rules
    filenames = []

    #: fn alias filenames
    alias_filenames = []

    #: mime types
    mimetypes = []

    # Python 2 metaclass hook: LexerMeta wraps any ``analyse_text``
    # defined on subclasses (see LexerMeta above).
    __metaclass__ = LexerMeta

    def __init__(self, **options):
        """Read the recognized options (see the class docstring) and
        register any filters passed via the ``filters`` option."""
        self.options = options
        self.stripnl = get_bool_opt(options, 'stripnl', True)
        self.stripall = get_bool_opt(options, 'stripall', False)
        self.ensurenl = get_bool_opt(options, 'ensurenl', True)
        self.tabsize = get_int_opt(options, 'tabsize', 0)
        self.encoding = options.get('encoding', 'latin1')
        # self.encoding = options.get('inencoding', None) or self.encoding
        self.filters = []
        for filter_ in get_list_opt(options, 'filters', ()):
            self.add_filter(filter_)

    def __repr__(self):
        if self.options:
            return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
                                                     self.options)
        else:
            return '<pygments.lexers.%s>' % self.__class__.__name__

    def add_filter(self, filter_, **options):
        """
        Add a new stream filter to this lexer.

        ``filter_`` may be a `Filter` instance or a filter name that is
        resolved via ``get_filter_by_name`` (with ``options`` applied).
        """
        if not isinstance(filter_, Filter):
            filter_ = get_filter_by_name(filter_, **options)
        self.filters.append(filter_)

    def analyse_text(text):
        """
        Has to return a float between ``0`` and ``1`` that indicates
        if a lexer wants to highlight this text. Used by ``guess_lexer``.
        If this method returns ``0`` it won't highlight it in any case, if
        it returns ``1`` highlighting with this lexer is guaranteed.

        The `LexerMeta` metaclass automatically wraps this function so
        that it works like a static method (no ``self`` or ``cls``
        parameter) and the return value is automatically converted to
        `float`. If the return value is an object that is boolean `False`
        it's the same as if the return values was ``0.0``.
        """

    def get_tokens(self, text, unfiltered=False):
        """
        Return an iterable of (tokentype, value) pairs generated from
        `text`. If `unfiltered` is set to `True`, the filtering mechanism
        is bypassed even if filters are defined.

        Also preprocess the text, i.e. expand tabs and strip it if
        wanted and applies registered filters.
        """
        # Decode byte input to unicode first (Python 2 semantics).
        if not isinstance(text, unicode):
            if self.encoding == 'guess':
                # Simple heuristic: try UTF-8 (stripping a BOM if present),
                # fall back to Latin-1, which never raises.
                try:
                    text = text.decode('utf-8')
                    if text.startswith(u'\ufeff'):
                        text = text[len(u'\ufeff'):]
                except UnicodeDecodeError:
                    text = text.decode('latin1')
            elif self.encoding == 'chardet':
                # chardet is optional; fail with a helpful message if absent.
                try:
                    import chardet
                except ImportError:
                    raise ImportError('To enable chardet encoding guessing, '
                                      'please install the chardet library '
                                      'from http://chardet.feedparser.org/')
                # check for BOM first
                decoded = None
                for bom, encoding in _encoding_map:
                    if text.startswith(bom):
                        decoded = unicode(text[len(bom):], encoding,
                                          errors='replace')
                        break
                # no BOM found, so use chardet
                if decoded is None:
                    enc = chardet.detect(text[:1024])  # Guess using first 1KB
                    decoded = unicode(text, enc.get('encoding') or 'utf-8',
                                      errors='replace')
                text = decoded
            else:
                text = text.decode(self.encoding)
        # text now *is* a unicode string
        # Normalize line endings, then apply the strip/tab/newline options.
        text = text.replace('\r\n', '\n')
        text = text.replace('\r', '\n')
        if self.stripall:
            text = text.strip()
        elif self.stripnl:
            text = text.strip('\n')
        if self.tabsize > 0:
            text = text.expandtabs(self.tabsize)
        if self.ensurenl and not text.endswith('\n'):
            text += '\n'

        def streamer():
            # Drop the position index; filters only see (type, value) pairs.
            for i, t, v in self.get_tokens_unprocessed(text):
                yield t, v
        stream = streamer()
        if not unfiltered:
            stream = apply_filters(stream, self.filters, self)
        return stream

    def get_tokens_unprocessed(self, text):
        """
        Return an iterable of (tokentype, value) pairs.
        In subclasses, implement this method as a generator to
        maximize effectiveness.
        """
        raise NotImplementedError
|
193
|
+
|
194
|
+
|
195
|
+
class DelegatingLexer(Lexer):
    """
    Combines two lexers: a root lexer and a language lexer.

    The text is first scanned with the language lexer; every run of
    ``Other`` tokens it produces is handed to the root lexer, and the
    two token streams are merged back together.

    The lexers from the ``template`` lexer package use this base lexer.
    """

    def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
        self.root_lexer = _root_lexer(**options)
        self.language_lexer = _language_lexer(**options)
        self.needle = _needle
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        root_parts = []   # pieces of text destined for the root lexer
        root_len = 0      # combined length of those pieces so far
        inserts = []      # (position, token list) pairs to splice back in
        pending = []      # language tokens not yet anchored to a position
        for idx, tok, val in self.language_lexer.get_tokens_unprocessed(text):
            if tok is self.needle:
                if pending:
                    inserts.append((root_len, pending))
                    pending = []
                root_parts.append(val)
                root_len += len(val)
            else:
                pending.append((idx, tok, val))
        if pending:
            inserts.append((root_len, pending))
        buffered = ''.join(root_parts)
        return do_insertions(
            inserts, self.root_lexer.get_tokens_unprocessed(buffered))
|
227
|
+
|
228
|
+
|
229
|
+
#-------------------------------------------------------------------------------
|
230
|
+
# RegexLexer and ExtendedRegexLexer
|
231
|
+
#
|
232
|
+
|
233
|
+
|
234
|
+
class include(str):
    """
    Marker string: used inside a state's rule list, it pulls in all
    rules of the state it names.
    """
|
239
|
+
|
240
|
+
|
241
|
+
class combined(tuple):
    """
    Marker tuple: names several states whose rules are merged into a
    new, anonymous state.
    """

    def __new__(cls, *args):
        # Pack the positional state names into the tuple itself.
        return tuple.__new__(cls, args)

    def __init__(self, *args):
        # Swallow the arguments; tuple.__init__ ignores them anyway.
        pass
|
252
|
+
|
253
|
+
|
254
|
+
class _PseudoMatch(object):
|
255
|
+
"""
|
256
|
+
A pseudo match object constructed from a string.
|
257
|
+
"""
|
258
|
+
|
259
|
+
def __init__(self, start, text):
|
260
|
+
self._text = text
|
261
|
+
self._start = start
|
262
|
+
|
263
|
+
def start(self, arg=None):
|
264
|
+
return self._start
|
265
|
+
|
266
|
+
def end(self, arg=None):
|
267
|
+
return self._start + len(self._text)
|
268
|
+
|
269
|
+
def group(self, arg=None):
|
270
|
+
if arg:
|
271
|
+
raise IndexError('No such group')
|
272
|
+
return self._text
|
273
|
+
|
274
|
+
def groups(self):
|
275
|
+
return (self._text,)
|
276
|
+
|
277
|
+
def groupdict(self):
|
278
|
+
return {}
|
279
|
+
|
280
|
+
|
281
|
+
def bygroups(*args):
    """
    Build a callback that emits one action per regex group of the match.

    ``None`` entries skip their group; plain token types are yielded
    with the group's text; callable actions are invoked recursively on
    a pseudo-match covering just that group.
    """
    def callback(lexer, match, ctx=None):
        for num, action in enumerate(args):
            if action is None:
                continue
            data = match.group(num + 1)
            if type(action) is _TokenType:
                # Plain token type: emit it unless the group is empty.
                if data:
                    yield match.start(num + 1), action, data
            elif data is not None:
                # Callable action: re-run it on just this group's text.
                if ctx:
                    ctx.pos = match.start(num + 1)
                pseudo = _PseudoMatch(match.start(num + 1), data)
                for item in action(lexer, pseudo, ctx):
                    if item:
                        yield item
        if ctx:
            ctx.pos = match.end()
    return callback
|
305
|
+
|
306
|
+
|
307
|
+
class _This(object):
|
308
|
+
"""
|
309
|
+
Special singleton used for indicating the caller class.
|
310
|
+
Used by ``using``.
|
311
|
+
"""
|
312
|
+
this = _This()
|
313
|
+
|
314
|
+
|
315
|
+
def using(_other, **kwargs):
    """
    Callback that hands the matched text to a different lexer.

    All keyword arguments except `state` are forwarded to the lexer;
    `state` selects the start state of the sub-lexer and may be either
    a sequence such as ``('root', 'inline', 'string')`` or a single
    state name, which is stacked on top of the root state.

    Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
    """
    gt_kwargs = {}
    if 'state' in kwargs:
        start = kwargs.pop('state')
        gt_kwargs['stack'] = (start if isinstance(start, (list, tuple))
                              else ('root', start))

    if _other is this:
        def callback(lexer, match, ctx=None):
            # With extra keyword arguments we cannot re-use the running
            # lexer instance; build a fresh one of the same class.
            if kwargs:
                # XXX: cache that somehow
                kwargs.update(lexer.options)
                lx = lexer.__class__(**kwargs)
            else:
                lx = lexer
            offset = match.start()
            for pos, tok, val in lx.get_tokens_unprocessed(match.group(),
                                                           **gt_kwargs):
                yield pos + offset, tok, val
            if ctx:
                ctx.pos = match.end()
    else:
        def callback(lexer, match, ctx=None):
            # XXX: cache that somehow
            kwargs.update(lexer.options)
            lx = _other(**kwargs)
            offset = match.start()
            for pos, tok, val in lx.get_tokens_unprocessed(match.group(),
                                                           **gt_kwargs):
                yield pos + offset, tok, val
            if ctx:
                ctx.pos = match.end()
    return callback
|
363
|
+
|
364
|
+
|
365
|
+
class RegexLexerMeta(LexerMeta):
    """
    Metaclass for RegexLexer, creates the self._tokens attribute from
    self.tokens on the first instantiation.
    """

    def _process_regex(cls, regex, rflags):
        """Preprocess the regular expression component of a token definition."""
        # Return the bound .match method so the lexer loop can call it directly.
        return re.compile(regex, rflags).match

    def _process_token(cls, token):
        """Preprocess the token component of a token definition."""
        assert type(token) is _TokenType or callable(token), \
            'token type must be simple type or callable, not %r' % (token,)
        return token

    def _process_new_state(cls, new_state, unprocessed, processed):
        """Preprocess the state transition action of a token definition."""
        if isinstance(new_state, str):
            # an existing state
            if new_state == '#pop':
                return -1
            elif new_state in unprocessed:
                return (new_state,)
            elif new_state == '#push':
                # '#push' is resolved dynamically by the lexer loop.
                return new_state
            elif new_state[:5] == '#pop:':
                # '#pop:n' pops n states at once (encoded as a negative int).
                return -int(new_state[5:])
            else:
                assert False, 'unknown new state %r' % new_state
        elif isinstance(new_state, combined):
            # combine a new state from existing ones
            tmp_state = '_tmp_%d' % cls._tmpname
            cls._tmpname += 1
            itokens = []
            for istate in new_state:
                assert istate != new_state, 'circular state ref %r' % istate
                itokens.extend(cls._process_state(unprocessed,
                                                  processed, istate))
            # Register the merged rules under the fresh anonymous name.
            processed[tmp_state] = itokens
            return (tmp_state,)
        elif isinstance(new_state, tuple):
            # push more than one state
            for istate in new_state:
                assert (istate in unprocessed or
                        istate in ('#pop', '#push')), \
                    'unknown new state ' + istate
            return new_state
        else:
            assert False, 'unknown new state def %r' % new_state

    def _process_state(cls, unprocessed, processed, state):
        """Preprocess a single state definition."""
        assert type(state) is str, "wrong state name %r" % state
        assert state[0] != '#', "invalid state name %r" % state
        if state in processed:
            # Already handled (possibly via include()/combined() recursion).
            return processed[state]
        tokens = processed[state] = []
        rflags = cls.flags
        for tdef in unprocessed[state]:
            if isinstance(tdef, include):
                # it's a state reference
                assert tdef != state, "circular state reference %r" % state
                tokens.extend(cls._process_state(unprocessed, processed,
                                                 str(tdef)))
                continue

            assert type(tdef) is tuple, "wrong rule def %r" % tdef

            try:
                rex = cls._process_regex(tdef[0], rflags)
            # NOTE(review): Python 2-only except syntax; Python 3 requires
            # "except Exception as err".
            except Exception, err:
                raise ValueError("uncompilable regex %r in state %r of %r: %s" %
                                 (tdef[0], state, cls, err))

            token = cls._process_token(tdef[1])

            if len(tdef) == 2:
                # two-element rule: no state transition
                new_state = None
            else:
                new_state = cls._process_new_state(tdef[2],
                                                   unprocessed, processed)

            tokens.append((rex, token, new_state))
        return tokens

    def process_tokendef(cls, name, tokendefs=None):
        """Preprocess a dictionary of token definitions."""
        processed = cls._all_tokens[name] = {}
        tokendefs = tokendefs or cls.tokens[name]
        for state in tokendefs.keys():
            cls._process_state(tokendefs, processed, state)
        return processed

    def __call__(cls, *args, **kwds):
        """Instantiate cls after preprocessing its token definitions."""
        if '_tokens' not in cls.__dict__:
            # First instantiation of this class: build and cache _tokens.
            cls._all_tokens = {}
            cls._tmpname = 0
            if hasattr(cls, 'token_variants') and cls.token_variants:
                # don't process yet
                pass
            else:
                cls._tokens = cls.process_tokendef('', cls.tokens)

        return type.__call__(cls, *args, **kwds)
|
471
|
+
|
472
|
+
|
473
|
+
class RegexLexer(Lexer):
    """
    Base for simple stateful regular expression-based lexers.
    Simplifies the lexing process so that you need only
    provide a list of states and regular expressions.
    """
    # Processed by RegexLexerMeta into self._tokens on first instantiation.
    __metaclass__ = RegexLexerMeta

    #: Flags for compiling the regular expressions.
    #: Defaults to MULTILINE.
    flags = re.MULTILINE

    #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
    #:
    #: The initial state is 'root'.
    #: ``new_state`` can be omitted to signify no state transition.
    #: If it is a string, the state is pushed on the stack and changed.
    #: If it is a tuple of strings, all states are pushed on the stack and
    #: the current state will be the topmost.
    #: It can also be ``combined('state1', 'state2', ...)``
    #: to signify a new, anonymous state combined from the rules of two
    #: or more existing ones.
    #: Furthermore, it can be '#pop' to signify going back one step in
    #: the state stack, or '#push' to push the current state on the stack
    #: again.
    #:
    #: The tuple can also be replaced with ``include('state')``, in which
    #: case the rules from the state named by the string are included in the
    #: current one.
    tokens = {}

    def get_tokens_unprocessed(self, text, stack=('root',)):
        """
        Split ``text`` into (index, tokentype, value) tuples.

        ``stack`` is the initial stack (default: ``['root']``)
        """
        pos = 0
        tokendefs = self._tokens
        statestack = list(stack)
        statetokens = tokendefs[statestack[-1]]
        while 1:
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, pos)
                if m:
                    if type(action) is _TokenType:
                        yield pos, action, m.group()
                    else:
                        # action is a callback (e.g. from bygroups()/using())
                        for item in action(self, m):
                            yield item
                    pos = m.end()
                    if new_state is not None:
                        # state transition
                        if isinstance(new_state, tuple):
                            for state in new_state:
                                if state == '#pop':
                                    statestack.pop()
                                elif state == '#push':
                                    statestack.append(statestack[-1])
                                else:
                                    statestack.append(state)
                        elif isinstance(new_state, int):
                            # pop (negative int = number of states to drop)
                            del statestack[new_state:]
                        elif new_state == '#push':
                            statestack.append(statestack[-1])
                        else:
                            assert False, "wrong state def: %r" % new_state
                        statetokens = tokendefs[statestack[-1]]
                    break
            else:
                # No rule matched at the current position.
                try:
                    if text[pos] == '\n':
                        # at EOL, reset state to "root"
                        statestack = ['root']
                        statetokens = tokendefs['root']
                        yield pos, Text, u'\n'
                        pos += 1
                        continue
                    # Emit a single Error token and advance one character.
                    yield pos, Error, text[pos]
                    pos += 1
                except IndexError:
                    # end of text reached
                    break
|
556
|
+
|
557
|
+
|
558
|
+
class LexerContext(object):
    """
    Mutable holder for the text, position, end offset and state stack
    used while lexing.
    """

    def __init__(self, text, pos, stack=None, end=None):
        self.text = text
        self.pos = pos
        # A falsy ``end`` (including 0) falls back to the text length.
        self.end = len(text) if not end else end
        # Likewise a falsy stack starts at the root state.
        self.stack = ['root'] if not stack else stack

    def __repr__(self):
        return 'LexerContext(%r, %r, %r)' % (
            self.text, self.pos, self.stack)
|
572
|
+
|
573
|
+
|
574
|
+
class ExtendedRegexLexer(RegexLexer):
    """
    A RegexLexer that uses a context object to store its state.
    """

    def get_tokens_unprocessed(self, text=None, context=None):
        """
        Split ``text`` into (index, tokentype, value) tuples.
        If ``context`` is given, use this lexer context instead.
        """
        tokendefs = self._tokens
        if not context:
            # Fresh run: start at position 0 in the root state.
            ctx = LexerContext(text, 0)
            statetokens = tokendefs['root']
        else:
            # Resume from the supplied context (its text wins over ``text``).
            ctx = context
            statetokens = tokendefs[ctx.stack[-1]]
            text = ctx.text
        while 1:
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, ctx.pos, ctx.end)
                if m:
                    if type(action) is _TokenType:
                        yield ctx.pos, action, m.group()
                        ctx.pos = m.end()
                    else:
                        # Callback actions receive the context and may
                        # mutate ctx.stack / ctx.pos themselves.
                        for item in action(self, m, ctx):
                            yield item
                        if not new_state:
                            # altered the state stack?
                            statetokens = tokendefs[ctx.stack[-1]]
                    # CAUTION: callback must set ctx.pos!
                    if new_state is not None:
                        # state transition
                        # NOTE(review): unlike RegexLexer, '#pop'/'#push'
                        # inside a tuple are pushed literally here.
                        if isinstance(new_state, tuple):
                            ctx.stack.extend(new_state)
                        elif isinstance(new_state, int):
                            # pop (negative int = number of states to drop)
                            del ctx.stack[new_state:]
                        elif new_state == '#push':
                            ctx.stack.append(ctx.stack[-1])
                        else:
                            assert False, "wrong state def: %r" % new_state
                        statetokens = tokendefs[ctx.stack[-1]]
                    break
            else:
                # No rule matched at the current position.
                try:
                    if ctx.pos >= ctx.end:
                        break
                    if text[ctx.pos] == '\n':
                        # at EOL, reset state to "root"
                        # NOTE(review): pos is advanced before the newline
                        # token is yielded, so its index points past the
                        # '\n' (unlike RegexLexer) -- confirm intended.
                        ctx.pos += 1
                        ctx.stack = ['root']
                        statetokens = tokendefs['root']
                        yield ctx.pos, Text, u'\n'
                        continue
                    yield ctx.pos, Error, text[ctx.pos]
                    ctx.pos += 1
                except IndexError:
                    break
|
634
|
+
|
635
|
+
|
636
|
+
def do_insertions(insertions, tokens):
    """
    Helper for lexers which must combine the results of several
    sublexers.

    ``insertions`` is a list of ``(index, itokens)`` pairs.
    Each ``itokens`` iterable should be inserted at position
    ``index`` into the token stream given by the ``tokens``
    argument.

    The result is a combined token stream of (position, token, value)
    tuples; tokens from ``tokens`` are split where insertions fall
    inside them, and output positions are recomputed accordingly.

    TODO: clean up the code here.
    """
    insertions = iter(insertions)
    try:
        # next() builtin rather than the Python 2-only .next() method,
        # so this works on Python 2.6+ and Python 3.
        index, itokens = next(insertions)
    except StopIteration:
        # no insertions, pass the token stream through unchanged
        for item in tokens:
            yield item
        return

    realpos = None   # output position; initialized from the first token
    insleft = True   # are there insertions left to merge in?

    # iterate over the token stream where we want to insert
    # the tokens from the insertion list.
    for i, t, v in tokens:
        # first iteration. store the position of first item
        if realpos is None:
            realpos = i
        oldi = 0
        while insleft and i + len(v) >= index:
            # split the current token at the insertion point ...
            tmpval = v[oldi:index - i]
            yield realpos, t, tmpval
            realpos += len(tmpval)
            # ... emit the inserted tokens ...
            for it_index, it_token, it_value in itokens:
                yield realpos, it_token, it_value
                realpos += len(it_value)
            oldi = index - i
            # ... and fetch the next insertion point.
            try:
                index, itokens = next(insertions)
            except StopIteration:
                insleft = False
                break  # not strictly necessary
        # emit whatever remains of the current token
        yield realpos, t, v[oldi:]
        realpos += len(v) - oldi

    # leftover insertions past the end of the token stream
    while insleft:
        # no normal tokens, set realpos to zero
        realpos = realpos or 0
        for p, t, v in itokens:
            yield realpos, t, v
            realpos += len(v)
        try:
            index, itokens = next(insertions)
        except StopIteration:
            insleft = False
            break  # not strictly necessary
|
697
|
+
|