pygments.rb 1.2.1 → 2.0.0.rc2
- checksums.yaml +5 -5
- data/.github/dependabot.yml +13 -0
- data/.github/workflows/ci.yml +28 -0
- data/.github/workflows/release.yml +24 -0
- data/.gitignore +5 -6
- data/CHANGELOG.adoc +119 -0
- data/Gemfile +3 -1
- data/LICENSE +1 -1
- data/README.adoc +161 -0
- data/Rakefile +10 -21
- data/bench.rb +8 -7
- data/cache-lexers.rb +3 -2
- data/lib/pygments.rb +10 -11
- data/lib/pygments/lexer.rb +5 -5
- data/lib/pygments/mentos.py +23 -66
- data/lib/pygments/popen.rb +152 -204
- data/lib/pygments/version.rb +2 -2
- data/pygments.rb.gemspec +11 -9
- data/test/test_pygments.rb +51 -84
- data/vendor/pygments-main/{AUTHORS → Pygments-2.7.3.dist-info/AUTHORS} +21 -3
- data/vendor/pygments-main/Pygments-2.7.3.dist-info/INSTALLER +1 -0
- data/vendor/pygments-main/{LICENSE → Pygments-2.7.3.dist-info/LICENSE} +1 -1
- data/vendor/pygments-main/Pygments-2.7.3.dist-info/METADATA +49 -0
- data/vendor/pygments-main/Pygments-2.7.3.dist-info/RECORD +482 -0
- data/vendor/pygments-main/Pygments-2.7.3.dist-info/REQUESTED +0 -0
- data/vendor/pygments-main/Pygments-2.7.3.dist-info/WHEEL +5 -0
- data/vendor/pygments-main/Pygments-2.7.3.dist-info/entry_points.txt +3 -0
- data/vendor/pygments-main/Pygments-2.7.3.dist-info/top_level.txt +1 -0
- data/vendor/pygments-main/bin/pygmentize +8 -0
- data/vendor/pygments-main/pygments/__init__.py +6 -11
- data/vendor/pygments-main/pygments/__main__.py +18 -0
- data/vendor/pygments-main/pygments/cmdline.py +38 -29
- data/vendor/pygments-main/pygments/console.py +6 -9
- data/vendor/pygments-main/pygments/filter.py +4 -6
- data/vendor/pygments-main/pygments/filters/__init__.py +609 -21
- data/vendor/pygments-main/pygments/formatter.py +4 -4
- data/vendor/pygments-main/pygments/formatters/__init__.py +9 -8
- data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -3
- data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -1
- data/vendor/pygments-main/pygments/formatters/html.py +223 -135
- data/vendor/pygments-main/pygments/formatters/img.py +68 -41
- data/vendor/pygments-main/pygments/formatters/irc.py +39 -39
- data/vendor/pygments-main/pygments/formatters/latex.py +56 -26
- data/vendor/pygments-main/pygments/formatters/other.py +12 -8
- data/vendor/pygments-main/pygments/formatters/rtf.py +29 -29
- data/vendor/pygments-main/pygments/formatters/svg.py +38 -4
- data/vendor/pygments-main/pygments/formatters/terminal.py +25 -31
- data/vendor/pygments-main/pygments/formatters/terminal256.py +22 -12
- data/vendor/pygments-main/pygments/lexer.py +41 -39
- data/vendor/pygments-main/pygments/lexers/__init__.py +342 -0
- data/vendor/pygments-main/pygments/lexers/_asy_builtins.py +1645 -0
- data/vendor/pygments-main/pygments/lexers/_cl_builtins.py +232 -0
- data/vendor/pygments-main/pygments/lexers/_cocoa_builtins.py +71 -0
- data/vendor/pygments-main/pygments/lexers/_csound_builtins.py +1725 -0
- data/vendor/pygments-main/pygments/lexers/_lasso_builtins.py +5327 -0
- data/vendor/pygments-main/pygments/lexers/_lua_builtins.py +293 -0
- data/vendor/pygments-main/pygments/lexers/_mapping.py +551 -0
- data/vendor/pygments-main/pygments/lexers/_mql_builtins.py +1172 -0
- data/vendor/pygments-main/pygments/lexers/_mysql_builtins.py +1282 -0
- data/vendor/pygments-main/pygments/lexers/_openedge_builtins.py +2547 -0
- data/vendor/pygments-main/pygments/lexers/_php_builtins.py +4753 -0
- data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +678 -0
- data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +3094 -0
- data/vendor/pygments-main/pygments/lexers/_sourcemod_builtins.py +1161 -0
- data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +558 -0
- data/vendor/pygments-main/pygments/lexers/_stata_builtins.py +421 -0
- data/vendor/pygments-main/pygments/lexers/_tsql_builtins.py +1004 -0
- data/vendor/pygments-main/pygments/lexers/_usd_builtins.py +113 -0
- data/vendor/pygments-main/pygments/lexers/_vbscript_builtins.py +280 -0
- data/vendor/pygments-main/pygments/lexers/_vim_builtins.py +1939 -0
- data/vendor/pygments-main/pygments/lexers/actionscript.py +245 -0
- data/vendor/pygments-main/pygments/lexers/agile.py +24 -0
- data/vendor/pygments-main/pygments/lexers/algebra.py +240 -0
- data/vendor/pygments-main/pygments/lexers/ambient.py +76 -0
- data/vendor/pygments-main/pygments/lexers/ampl.py +87 -0
- data/vendor/pygments-main/pygments/lexers/apl.py +101 -0
- data/vendor/pygments-main/pygments/lexers/archetype.py +318 -0
- data/vendor/pygments-main/pygments/lexers/arrow.py +117 -0
- data/vendor/pygments-main/pygments/lexers/asm.py +1005 -0
- data/vendor/pygments-main/pygments/lexers/automation.py +374 -0
- data/vendor/pygments-main/pygments/lexers/bare.py +104 -0
- data/vendor/pygments-main/pygments/lexers/basic.py +662 -0
- data/vendor/pygments-main/pygments/lexers/bibtex.py +160 -0
- data/vendor/pygments-main/pygments/lexers/boa.py +102 -0
- data/vendor/pygments-main/pygments/lexers/business.py +627 -0
- data/vendor/pygments-main/pygments/lexers/c_cpp.py +344 -0
- data/vendor/pygments-main/pygments/lexers/c_like.py +566 -0
- data/vendor/pygments-main/pygments/lexers/capnproto.py +78 -0
- data/vendor/pygments-main/pygments/lexers/chapel.py +112 -0
- data/vendor/pygments-main/pygments/lexers/clean.py +179 -0
- data/vendor/pygments-main/pygments/lexers/compiled.py +34 -0
- data/vendor/pygments-main/pygments/lexers/configs.py +984 -0
- data/vendor/pygments-main/pygments/lexers/console.py +114 -0
- data/vendor/pygments-main/pygments/lexers/crystal.py +392 -0
- data/vendor/pygments-main/pygments/lexers/csound.py +467 -0
- data/vendor/pygments-main/pygments/lexers/css.py +691 -0
- data/vendor/pygments-main/pygments/lexers/d.py +256 -0
- data/vendor/pygments-main/pygments/lexers/dalvik.py +125 -0
- data/vendor/pygments-main/pygments/lexers/data.py +698 -0
- data/vendor/pygments-main/pygments/lexers/devicetree.py +109 -0
- data/vendor/pygments-main/pygments/lexers/diff.py +165 -0
- data/vendor/pygments-main/pygments/lexers/dotnet.py +707 -0
- data/vendor/pygments-main/pygments/lexers/dsls.py +960 -0
- data/vendor/pygments-main/pygments/lexers/dylan.py +287 -0
- data/vendor/pygments-main/pygments/lexers/ecl.py +139 -0
- data/vendor/pygments-main/pygments/lexers/eiffel.py +65 -0
- data/vendor/pygments-main/pygments/lexers/elm.py +121 -0
- data/vendor/pygments-main/pygments/lexers/email.py +151 -0
- data/vendor/pygments-main/pygments/lexers/erlang.py +530 -0
- data/vendor/pygments-main/pygments/lexers/esoteric.py +304 -0
- data/vendor/pygments-main/pygments/lexers/ezhil.py +77 -0
- data/vendor/pygments-main/pygments/lexers/factor.py +344 -0
- data/vendor/pygments-main/pygments/lexers/fantom.py +250 -0
- data/vendor/pygments-main/pygments/lexers/felix.py +273 -0
- data/vendor/pygments-main/pygments/lexers/floscript.py +83 -0
- data/vendor/pygments-main/pygments/lexers/forth.py +178 -0
- data/vendor/pygments-main/pygments/lexers/fortran.py +206 -0
- data/vendor/pygments-main/pygments/lexers/foxpro.py +428 -0
- data/vendor/pygments-main/pygments/lexers/freefem.py +898 -0
- data/vendor/pygments-main/pygments/lexers/functional.py +21 -0
- data/vendor/pygments-main/pygments/lexers/gdscript.py +346 -0
- data/vendor/pygments-main/pygments/lexers/go.py +101 -0
- data/vendor/pygments-main/pygments/lexers/grammar_notation.py +270 -0
- data/vendor/pygments-main/pygments/lexers/graph.py +85 -0
- data/vendor/pygments-main/pygments/lexers/graphics.py +800 -0
- data/vendor/pygments-main/pygments/lexers/haskell.py +870 -0
- data/vendor/pygments-main/pygments/lexers/haxe.py +936 -0
- data/vendor/pygments-main/pygments/lexers/hdl.py +472 -0
- data/vendor/pygments-main/pygments/lexers/hexdump.py +103 -0
- data/vendor/pygments-main/pygments/lexers/html.py +614 -0
- data/vendor/pygments-main/pygments/lexers/idl.py +281 -0
- data/vendor/pygments-main/pygments/lexers/igor.py +420 -0
- data/vendor/pygments-main/pygments/lexers/inferno.py +96 -0
- data/vendor/pygments-main/pygments/lexers/installers.py +322 -0
- data/vendor/pygments-main/pygments/lexers/int_fiction.py +1368 -0
- data/vendor/pygments-main/pygments/lexers/iolang.py +63 -0
- data/vendor/pygments-main/pygments/lexers/j.py +146 -0
- data/vendor/pygments-main/pygments/lexers/javascript.py +1540 -0
- data/vendor/pygments-main/pygments/lexers/julia.py +331 -0
- data/vendor/pygments-main/pygments/lexers/jvm.py +1673 -0
- data/vendor/pygments-main/pygments/lexers/lisp.py +2699 -0
- data/vendor/pygments-main/pygments/lexers/make.py +206 -0
- data/vendor/pygments-main/pygments/lexers/markup.py +765 -0
- data/vendor/pygments-main/pygments/lexers/math.py +21 -0
- data/vendor/pygments-main/pygments/lexers/matlab.py +720 -0
- data/vendor/pygments-main/pygments/lexers/mime.py +226 -0
- data/vendor/pygments-main/pygments/lexers/ml.py +958 -0
- data/vendor/pygments-main/pygments/lexers/modeling.py +366 -0
- data/vendor/pygments-main/pygments/lexers/modula2.py +1580 -0
- data/vendor/pygments-main/pygments/lexers/monte.py +204 -0
- data/vendor/pygments-main/pygments/lexers/mosel.py +448 -0
- data/vendor/pygments-main/pygments/lexers/ncl.py +894 -0
- data/vendor/pygments-main/pygments/lexers/nimrod.py +159 -0
- data/vendor/pygments-main/pygments/lexers/nit.py +64 -0
- data/vendor/pygments-main/pygments/lexers/nix.py +136 -0
- data/vendor/pygments-main/pygments/lexers/oberon.py +121 -0
- data/vendor/pygments-main/pygments/lexers/objective.py +504 -0
- data/vendor/pygments-main/pygments/lexers/ooc.py +85 -0
- data/vendor/pygments-main/pygments/lexers/other.py +41 -0
- data/vendor/pygments-main/pygments/lexers/parasail.py +79 -0
- data/vendor/pygments-main/pygments/lexers/parsers.py +800 -0
- data/vendor/pygments-main/pygments/lexers/pascal.py +644 -0
- data/vendor/pygments-main/pygments/lexers/pawn.py +205 -0
- data/vendor/pygments-main/pygments/lexers/perl.py +732 -0
- data/vendor/pygments-main/pygments/lexers/php.py +321 -0
- data/vendor/pygments-main/pygments/lexers/pointless.py +71 -0
- data/vendor/pygments-main/pygments/lexers/pony.py +94 -0
- data/vendor/pygments-main/pygments/lexers/praat.py +302 -0
- data/vendor/pygments-main/pygments/lexers/prolog.py +306 -0
- data/vendor/pygments-main/pygments/lexers/promql.py +183 -0
- data/vendor/pygments-main/pygments/lexers/python.py +1151 -0
- data/vendor/pygments-main/pygments/lexers/qvt.py +152 -0
- data/vendor/pygments-main/pygments/lexers/r.py +191 -0
- data/vendor/pygments-main/pygments/lexers/rdf.py +463 -0
- data/vendor/pygments-main/pygments/lexers/rebol.py +431 -0
- data/vendor/pygments-main/pygments/lexers/resource.py +85 -0
- data/vendor/pygments-main/pygments/lexers/ride.py +139 -0
- data/vendor/pygments-main/pygments/lexers/rnc.py +67 -0
- data/vendor/pygments-main/pygments/lexers/roboconf.py +82 -0
- data/vendor/pygments-main/pygments/lexers/robotframework.py +552 -0
- data/vendor/pygments-main/pygments/lexers/ruby.py +517 -0
- data/vendor/pygments-main/pygments/lexers/rust.py +224 -0
- data/vendor/pygments-main/pygments/lexers/sas.py +228 -0
- data/vendor/pygments-main/pygments/lexers/scdoc.py +83 -0
- data/vendor/pygments-main/pygments/lexers/scripting.py +1284 -0
- data/vendor/pygments-main/pygments/lexers/sgf.py +61 -0
- data/vendor/pygments-main/pygments/lexers/shell.py +914 -0
- data/vendor/pygments-main/pygments/lexers/sieve.py +69 -0
- data/vendor/pygments-main/pygments/lexers/slash.py +185 -0
- data/vendor/pygments-main/pygments/lexers/smalltalk.py +195 -0
- data/vendor/pygments-main/pygments/lexers/smv.py +79 -0
- data/vendor/pygments-main/pygments/lexers/snobol.py +83 -0
- data/vendor/pygments-main/pygments/lexers/solidity.py +92 -0
- data/vendor/pygments-main/pygments/lexers/special.py +105 -0
- data/vendor/pygments-main/pygments/lexers/sql.py +837 -0
- data/vendor/pygments-main/pygments/lexers/stata.py +171 -0
- data/vendor/pygments-main/pygments/lexers/supercollider.py +95 -0
- data/vendor/pygments-main/pygments/lexers/tcl.py +145 -0
- data/vendor/pygments-main/pygments/lexers/templates.py +2264 -0
- data/vendor/pygments-main/pygments/lexers/teraterm.py +335 -0
- data/vendor/pygments-main/pygments/lexers/testing.py +207 -0
- data/vendor/pygments-main/pygments/lexers/text.py +26 -0
- data/vendor/pygments-main/pygments/lexers/textedit.py +169 -0
- data/vendor/pygments-main/pygments/lexers/textfmts.py +430 -0
- data/vendor/pygments-main/pygments/lexers/theorem.py +474 -0
- data/vendor/pygments-main/pygments/lexers/tnt.py +263 -0
- data/vendor/pygments-main/pygments/lexers/trafficscript.py +54 -0
- data/vendor/pygments-main/pygments/lexers/typoscript.py +219 -0
- data/vendor/pygments-main/pygments/lexers/unicon.py +412 -0
- data/vendor/pygments-main/pygments/lexers/urbi.py +146 -0
- data/vendor/pygments-main/pygments/lexers/usd.py +90 -0
- data/vendor/pygments-main/pygments/lexers/varnish.py +190 -0
- data/vendor/pygments-main/pygments/lexers/verification.py +114 -0
- data/vendor/pygments-main/pygments/lexers/web.py +24 -0
- data/vendor/pygments-main/pygments/lexers/webidl.py +299 -0
- data/vendor/pygments-main/pygments/lexers/webmisc.py +991 -0
- data/vendor/pygments-main/pygments/lexers/whiley.py +116 -0
- data/vendor/pygments-main/pygments/lexers/x10.py +69 -0
- data/vendor/pygments-main/pygments/lexers/xorg.py +37 -0
- data/vendor/pygments-main/pygments/lexers/yang.py +104 -0
- data/vendor/pygments-main/pygments/lexers/zig.py +124 -0
- data/vendor/pygments-main/pygments/modeline.py +1 -1
- data/vendor/pygments-main/pygments/plugin.py +4 -2
- data/vendor/pygments-main/pygments/regexopt.py +1 -1
- data/vendor/pygments-main/pygments/scanner.py +2 -2
- data/vendor/pygments-main/pygments/sphinxext.py +2 -4
- data/vendor/pygments-main/pygments/style.py +61 -24
- data/vendor/pygments-main/pygments/styles/__init__.py +10 -4
- data/vendor/pygments-main/pygments/styles/abap.py +1 -1
- data/vendor/pygments-main/pygments/styles/algol.py +1 -1
- data/vendor/pygments-main/pygments/styles/algol_nu.py +1 -1
- data/vendor/pygments-main/pygments/styles/arduino.py +2 -2
- data/vendor/pygments-main/pygments/styles/autumn.py +1 -1
- data/vendor/pygments-main/pygments/styles/borland.py +1 -1
- data/vendor/pygments-main/pygments/styles/bw.py +1 -1
- data/vendor/pygments-main/pygments/styles/colorful.py +1 -1
- data/vendor/pygments-main/pygments/styles/default.py +1 -1
- data/vendor/pygments-main/pygments/styles/emacs.py +1 -1
- data/vendor/pygments-main/pygments/styles/friendly.py +1 -1
- data/vendor/pygments-main/pygments/styles/fruity.py +1 -1
- data/vendor/pygments-main/pygments/styles/igor.py +1 -1
- data/vendor/pygments-main/pygments/styles/inkpot.py +67 -0
- data/vendor/pygments-main/pygments/styles/lovelace.py +1 -1
- data/vendor/pygments-main/pygments/styles/manni.py +1 -1
- data/vendor/pygments-main/pygments/styles/monokai.py +4 -3
- data/vendor/pygments-main/pygments/styles/murphy.py +1 -1
- data/vendor/pygments-main/pygments/styles/native.py +1 -1
- data/vendor/pygments-main/pygments/styles/paraiso_dark.py +1 -1
- data/vendor/pygments-main/pygments/styles/paraiso_light.py +1 -1
- data/vendor/pygments-main/pygments/styles/pastie.py +1 -1
- data/vendor/pygments-main/pygments/styles/perldoc.py +1 -1
- data/vendor/pygments-main/pygments/styles/rainbow_dash.py +1 -1
- data/vendor/pygments-main/pygments/styles/rrt.py +1 -1
- data/vendor/pygments-main/pygments/styles/sas.py +1 -1
- data/vendor/pygments-main/pygments/styles/solarized.py +134 -0
- data/vendor/pygments-main/pygments/styles/stata_dark.py +41 -0
- data/vendor/pygments-main/pygments/styles/{stata.py → stata_light.py} +14 -15
- data/vendor/pygments-main/pygments/styles/tango.py +1 -1
- data/vendor/pygments-main/pygments/styles/trac.py +1 -1
- data/vendor/pygments-main/pygments/styles/vim.py +1 -1
- data/vendor/pygments-main/pygments/styles/vs.py +1 -1
- data/vendor/pygments-main/pygments/styles/xcode.py +1 -1
- data/vendor/pygments-main/pygments/token.py +1 -1
- data/vendor/pygments-main/pygments/unistring.py +47 -108
- data/vendor/pygments-main/pygments/util.py +15 -92
- metadata +69 -136
- data/CHANGELOG.md +0 -111
- data/README.md +0 -121
- data/circle.yml +0 -20
- data/test/test_data.py +0 -514
- data/test/test_data_generated +0 -2582
- data/vendor/custom_lexers/github.py +0 -565
- data/vendor/pygments-main/CHANGES +0 -1186
- data/vendor/pygments-main/MANIFEST.in +0 -6
- data/vendor/pygments-main/Makefile +0 -65
- data/vendor/pygments-main/README.rst +0 -39
- data/vendor/pygments-main/REVISION +0 -1
- data/vendor/pygments-main/TODO +0 -12
- data/vendor/pygments-main/doc/Makefile +0 -153
- data/vendor/pygments-main/doc/_static/favicon.ico +0 -0
- data/vendor/pygments-main/doc/_static/logo_new.png +0 -0
- data/vendor/pygments-main/doc/_static/logo_only.png +0 -0
- data/vendor/pygments-main/doc/_templates/docssidebar.html +0 -3
- data/vendor/pygments-main/doc/_templates/indexsidebar.html +0 -25
- data/vendor/pygments-main/doc/_themes/pygments14/layout.html +0 -98
- data/vendor/pygments-main/doc/_themes/pygments14/static/bodybg.png +0 -0
- data/vendor/pygments-main/doc/_themes/pygments14/static/docbg.png +0 -0
- data/vendor/pygments-main/doc/_themes/pygments14/static/listitem.png +0 -0
- data/vendor/pygments-main/doc/_themes/pygments14/static/logo.png +0 -0
- data/vendor/pygments-main/doc/_themes/pygments14/static/pocoo.png +0 -0
- data/vendor/pygments-main/doc/_themes/pygments14/static/pygments14.css_t +0 -401
- data/vendor/pygments-main/doc/_themes/pygments14/theme.conf +0 -15
- data/vendor/pygments-main/doc/conf.py +0 -241
- data/vendor/pygments-main/doc/docs/api.rst +0 -354
- data/vendor/pygments-main/doc/docs/authors.rst +0 -4
- data/vendor/pygments-main/doc/docs/changelog.rst +0 -1
- data/vendor/pygments-main/doc/docs/cmdline.rst +0 -166
- data/vendor/pygments-main/doc/docs/filterdevelopment.rst +0 -71
- data/vendor/pygments-main/doc/docs/filters.rst +0 -41
- data/vendor/pygments-main/doc/docs/formatterdevelopment.rst +0 -169
- data/vendor/pygments-main/doc/docs/formatters.rst +0 -48
- data/vendor/pygments-main/doc/docs/index.rst +0 -66
- data/vendor/pygments-main/doc/docs/integrate.rst +0 -40
- data/vendor/pygments-main/doc/docs/java.rst +0 -70
- data/vendor/pygments-main/doc/docs/lexerdevelopment.rst +0 -728
- data/vendor/pygments-main/doc/docs/lexers.rst +0 -69
- data/vendor/pygments-main/doc/docs/moinmoin.rst +0 -39
- data/vendor/pygments-main/doc/docs/plugins.rst +0 -93
- data/vendor/pygments-main/doc/docs/quickstart.rst +0 -205
- data/vendor/pygments-main/doc/docs/rstdirective.rst +0 -22
- data/vendor/pygments-main/doc/docs/styles.rst +0 -201
- data/vendor/pygments-main/doc/docs/tokens.rst +0 -372
- data/vendor/pygments-main/doc/docs/unicode.rst +0 -58
- data/vendor/pygments-main/doc/download.rst +0 -41
- data/vendor/pygments-main/doc/faq.rst +0 -139
- data/vendor/pygments-main/doc/index.rst +0 -54
- data/vendor/pygments-main/doc/languages.rst +0 -154
- data/vendor/pygments-main/doc/make.bat +0 -190
- data/vendor/pygments-main/doc/pygmentize.1 +0 -94
- data/vendor/pygments-main/external/autopygmentize +0 -101
- data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +0 -162
- data/vendor/pygments-main/external/markdown-processor.py +0 -67
- data/vendor/pygments-main/external/moin-parser.py +0 -112
- data/vendor/pygments-main/external/pygments.bashcomp +0 -38
- data/vendor/pygments-main/external/rst-directive.py +0 -82
- data/vendor/pygments-main/pygmentize +0 -8
- data/vendor/pygments-main/requirements.txt +0 -5
- data/vendor/pygments-main/scripts/check_sources.py +0 -211
- data/vendor/pygments-main/scripts/debug_lexer.py +0 -246
- data/vendor/pygments-main/scripts/detect_missing_analyse_text.py +0 -33
- data/vendor/pygments-main/scripts/epydoc.css +0 -280
- data/vendor/pygments-main/scripts/get_vimkw.py +0 -74
- data/vendor/pygments-main/scripts/pylintrc +0 -301
- data/vendor/pygments-main/scripts/vim2pygments.py +0 -935
- data/vendor/pygments-main/setup.cfg +0 -10
- data/vendor/pygments-main/setup.py +0 -77
- data/vendor/pygments-main/tox.ini +0 -7
- data/vendor/simplejson/.gitignore +0 -10
- data/vendor/simplejson/.travis.yml +0 -5
- data/vendor/simplejson/CHANGES.txt +0 -291
- data/vendor/simplejson/LICENSE.txt +0 -19
- data/vendor/simplejson/MANIFEST.in +0 -5
- data/vendor/simplejson/README.rst +0 -19
- data/vendor/simplejson/conf.py +0 -179
- data/vendor/simplejson/index.rst +0 -628
- data/vendor/simplejson/scripts/make_docs.py +0 -18
- data/vendor/simplejson/setup.py +0 -104
- data/vendor/simplejson/simplejson/__init__.py +0 -510
- data/vendor/simplejson/simplejson/_speedups.c +0 -2745
- data/vendor/simplejson/simplejson/decoder.py +0 -425
- data/vendor/simplejson/simplejson/encoder.py +0 -567
- data/vendor/simplejson/simplejson/ordered_dict.py +0 -119
- data/vendor/simplejson/simplejson/scanner.py +0 -77
- data/vendor/simplejson/simplejson/tests/__init__.py +0 -67
- data/vendor/simplejson/simplejson/tests/test_bigint_as_string.py +0 -55
- data/vendor/simplejson/simplejson/tests/test_check_circular.py +0 -30
- data/vendor/simplejson/simplejson/tests/test_decimal.py +0 -66
- data/vendor/simplejson/simplejson/tests/test_decode.py +0 -83
- data/vendor/simplejson/simplejson/tests/test_default.py +0 -9
- data/vendor/simplejson/simplejson/tests/test_dump.py +0 -67
- data/vendor/simplejson/simplejson/tests/test_encode_basestring_ascii.py +0 -46
- data/vendor/simplejson/simplejson/tests/test_encode_for_html.py +0 -32
- data/vendor/simplejson/simplejson/tests/test_errors.py +0 -34
- data/vendor/simplejson/simplejson/tests/test_fail.py +0 -91
- data/vendor/simplejson/simplejson/tests/test_float.py +0 -19
- data/vendor/simplejson/simplejson/tests/test_indent.py +0 -86
- data/vendor/simplejson/simplejson/tests/test_item_sort_key.py +0 -20
- data/vendor/simplejson/simplejson/tests/test_namedtuple.py +0 -121
- data/vendor/simplejson/simplejson/tests/test_pass1.py +0 -76
- data/vendor/simplejson/simplejson/tests/test_pass2.py +0 -14
- data/vendor/simplejson/simplejson/tests/test_pass3.py +0 -20
- data/vendor/simplejson/simplejson/tests/test_recursion.py +0 -67
- data/vendor/simplejson/simplejson/tests/test_scanstring.py +0 -117
- data/vendor/simplejson/simplejson/tests/test_separators.py +0 -42
- data/vendor/simplejson/simplejson/tests/test_speedups.py +0 -20
- data/vendor/simplejson/simplejson/tests/test_tuple.py +0 -49
- data/vendor/simplejson/simplejson/tests/test_unicode.py +0 -109
- data/vendor/simplejson/simplejson/tool.py +0 -39
--- a/data/vendor/pygments-main/doc/docs/formatters.rst
+++ /dev/null
@@ -1,48 +0,0 @@
-.. -*- mode: rst -*-
-
-====================
-Available formatters
-====================
-
-This page lists all builtin formatters.
-
-Common options
-==============
-
-All formatters support these options:
-
-`encoding`
-    If given, must be an encoding name (such as ``"utf-8"``). This will
-    be used to convert the token strings (which are Unicode strings)
-    to byte strings in the output (default: ``None``).
-    It will also be written in an encoding declaration suitable for the
-    document format if the `full` option is given (e.g. a ``meta
-    content-type`` directive in HTML or an invocation of the `inputenc`
-    package in LaTeX).
-
-    If this is ``""`` or ``None``, Unicode strings will be written
-    to the output file, which most file-like objects do not support.
-    For example, `pygments.highlight()` will return a Unicode string if
-    called with no `outfile` argument and a formatter that has `encoding`
-    set to ``None`` because it uses a `StringIO.StringIO` object that
-    supports Unicode arguments to `write()`. Using a regular file object
-    wouldn't work.
-
-    .. versionadded:: 0.6
-
-`outencoding`
-    When using Pygments from the command line, any `encoding` option given is
-    passed to the lexer and the formatter. This is sometimes not desirable,
-    for example if you want to set the input encoding to ``"guess"``.
-    Therefore, `outencoding` has been introduced which overrides `encoding`
-    for the formatter if given.
-
-    .. versionadded:: 0.7
-
-
-Formatter classes
-=================
-
-All these classes are importable from :mod:`pygments.formatters`.
-
-.. pygmentsdoc:: formatters
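
The `encoding` and `outencoding` options described in the removed formatters page above still exist in current Pygments. A freestanding sketch (not part of the diff; assumes any recent Pygments release) of what the option changes at the API level:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = 'print("hi")'

    # encoding=None (the default): highlight() renders into a text
    # buffer and hands back a unicode string.
    assert isinstance(highlight(code, PythonLexer(), HtmlFormatter()), str)

    # With an encoding set, the formatter encodes its output, so
    # highlight() hands back bytes instead.
    out = highlight(code, PythonLexer(), HtmlFormatter(encoding='utf-8'))
    assert isinstance(out, bytes)
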
--- a/data/vendor/pygments-main/doc/docs/index.rst
+++ /dev/null
@@ -1,66 +0,0 @@
-Pygments documentation
-======================
-
-**Starting with Pygments**
-
-.. toctree::
-   :maxdepth: 1
-
-   ../download
-   quickstart
-   cmdline
-
-**Builtin components**
-
-.. toctree::
-   :maxdepth: 1
-
-   lexers
-   filters
-   formatters
-   styles
-
-**Reference**
-
-.. toctree::
-   :maxdepth: 1
-
-   unicode
-   tokens
-   api
-
-**Hacking for Pygments**
-
-.. toctree::
-   :maxdepth: 1
-
-   lexerdevelopment
-   formatterdevelopment
-   filterdevelopment
-   plugins
-
-**Hints and tricks**
-
-.. toctree::
-   :maxdepth: 1
-
-   rstdirective
-   moinmoin
-   java
-   integrate
-
-**About Pygments**
-
-.. toctree::
-   :maxdepth: 1
-
-   changelog
-   authors
-
-
-If you find bugs or have suggestions for the documentation, please look
-:ref:`here <contribute>` for info on how to contact the team.
-
-.. XXX You can download an offline version of this documentation from the
-   :doc:`download page </download>`.
-
--- a/data/vendor/pygments-main/doc/docs/integrate.rst
+++ /dev/null
@@ -1,40 +0,0 @@
-.. -*- mode: rst -*-
-
-===================================
-Using Pygments in various scenarios
-===================================
-
-Markdown
---------
-
-Since Pygments 0.9, the distribution ships Markdown_ preprocessor sample code
-that uses Pygments to render source code in
-:file:`external/markdown-processor.py`. You can copy and adapt it to your
-liking.
-
-.. _Markdown: http://www.freewisdom.org/projects/python-markdown/
-
-TextMate
---------
-
-Antonio Cangiano has created a Pygments bundle for TextMate that allows to
-colorize code via a simple menu option. It can be found here_.
-
-.. _here: http://antoniocangiano.com/2008/10/28/pygments-textmate-bundle/
-
-Bash completion
----------------
-
-The source distribution contains a file ``external/pygments.bashcomp`` that
-sets up completion for the ``pygmentize`` command in bash.
-
-Wrappers for other languages
-----------------------------
-
-These libraries provide Pygments highlighting for users of other languages
-than Python:
-
-* `pygments.rb <https://github.com/tmm1/pygments.rb>`_, a pygments wrapper for Ruby
-* `Clygments <https://github.com/bfontaine/clygments>`_, a pygments wrapper for
-  Clojure
-* `PHPygments <https://github.com/capynet/PHPygments>`_, a pygments wrapper for PHP
--- a/data/vendor/pygments-main/doc/docs/java.rst
+++ /dev/null
@@ -1,70 +0,0 @@
-=====================
-Use Pygments in Java
-=====================
-
-Thanks to `Jython <http://www.jython.org>`_ it is possible to use Pygments in
-Java.
-
-This page is a simple tutorial to get an idea of how this works. You can
-then look at the `Jython documentation <http://www.jython.org/docs/>`_ for more
-advanced uses.
-
-Since version 1.5, Pygments is deployed on `Maven Central
-<http://repo1.maven.org/maven2/org/pygments/pygments/>`_ as a JAR, as is Jython
-which makes it a lot easier to create a Java project.
-
-Here is an example of a `Maven <http://www.maven.org>`_ ``pom.xml`` file for a
-project running Pygments:
-
-.. sourcecode:: xml
-
-    <?xml version="1.0" encoding="UTF-8"?>
-
-    <project xmlns="http://maven.apache.org/POM/4.0.0"
-             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
-                                 http://maven.apache.org/maven-v4_0_0.xsd">
-      <modelVersion>4.0.0</modelVersion>
-      <groupId>example</groupId>
-      <artifactId>example</artifactId>
-      <version>1.0-SNAPSHOT</version>
-      <dependencies>
-        <dependency>
-          <groupId>org.python</groupId>
-          <artifactId>jython-standalone</artifactId>
-          <version>2.5.3</version>
-        </dependency>
-        <dependency>
-          <groupId>org.pygments</groupId>
-          <artifactId>pygments</artifactId>
-          <version>1.5</version>
-          <scope>runtime</scope>
-        </dependency>
-      </dependencies>
-    </project>
-
-The following Java example:
-
-.. sourcecode:: java
-
-    PythonInterpreter interpreter = new PythonInterpreter();
-
-    // Set a variable with the content you want to work with
-    interpreter.set("code", code);
-
-    // Simple use Pygments as you would in Python
-    interpreter.exec("from pygments import highlight\n"
-        + "from pygments.lexers import PythonLexer\n"
-        + "from pygments.formatters import HtmlFormatter\n"
-        + "\nresult = highlight(code, PythonLexer(), HtmlFormatter())");
-
-    // Get the result that has been set in a variable
-    System.out.println(interpreter.get("result", String.class));
-
-will print something like:
-
-.. sourcecode:: html
-
-    <div class="highlight">
-    <pre><span class="k">print</span> <span class="s">"Hello World"</span></pre>
-    </div>
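
Stripped of the Jython scaffolding, the script embedded in the removed java.rst page above is ordinary Python. Run standalone (a sketch assuming any recent Pygments), it produces the HTML string that `interpreter.get("result", String.class)` fetches on the Java side:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = 'print "Hello World"'   # the source snippet being highlighted
    result = highlight(code, PythonLexer(), HtmlFormatter())
    print(result)   # <div class="highlight"><pre>...</pre></div>
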
--- a/data/vendor/pygments-main/doc/docs/lexerdevelopment.rst
+++ /dev/null
@@ -1,728 +0,0 @@
-.. -*- mode: rst -*-
-
-.. highlight:: python
-
-====================
-Write your own lexer
-====================
-
-If a lexer for your favorite language is missing in the Pygments package, you
-can easily write your own and extend Pygments.
-
-All you need can be found inside the :mod:`pygments.lexer` module. As you can
-read in the :doc:`API documentation <api>`, a lexer is a class that is
-initialized with some keyword arguments (the lexer options) and that provides a
-:meth:`.get_tokens_unprocessed()` method which is given a string or unicode
-object with the data to lex.
-
-The :meth:`.get_tokens_unprocessed()` method must return an iterator or iterable
-containing tuples in the form ``(index, token, value)``. Normally you don't
-need to do this since there are base lexers that do most of the work and that
-you can subclass.
-
-
-RegexLexer
-==========
-
-The lexer base class used by almost all of Pygments' lexers is the
-:class:`RegexLexer`. This class allows you to define lexing rules in terms of
-*regular expressions* for different *states*.
-
-States are groups of regular expressions that are matched against the input
-string at the *current position*. If one of these expressions matches, a
-corresponding action is performed (such as yielding a token with a specific
-type, or changing state), the current position is set to where the last match
-ended and the matching process continues with the first regex of the current
-state.
-
-Lexer states are kept on a stack: each time a new state is entered, the new
-state is pushed onto the stack. The most basic lexers (like the `DiffLexer`)
-just need one state.
-
-Each state is defined as a list of tuples in the form (`regex`, `action`,
-`new_state`) where the last item is optional. In the most basic form, `action`
-is a token type (like `Name.Builtin`). That means: When `regex` matches, emit a
-token with the match text and type `tokentype` and push `new_state` on the state
-stack. If the new state is ``'#pop'``, the topmost state is popped from the
-stack instead. To pop more than one state, use ``'#pop:2'`` and so on.
-``'#push'`` is a synonym for pushing the current state on the stack.
-
-The following example shows the `DiffLexer` from the builtin lexers. Note that
-it contains some additional attributes `name`, `aliases` and `filenames` which
-aren't required for a lexer. They are used by the builtin lexer lookup
-functions. ::
-
-    from pygments.lexer import RegexLexer
-    from pygments.token import *
-
-    class DiffLexer(RegexLexer):
-        name = 'Diff'
-        aliases = ['diff']
-        filenames = ['*.diff']
-
-        tokens = {
-            'root': [
-                (r' .*\n', Text),
-                (r'\+.*\n', Generic.Inserted),
-                (r'-.*\n', Generic.Deleted),
-                (r'@.*\n', Generic.Subheading),
-                (r'Index.*\n', Generic.Heading),
-                (r'=.*\n', Generic.Heading),
-                (r'.*\n', Text),
-            ]
-        }
-
-As you can see this lexer only uses one state. When the lexer starts scanning
-the text, it first checks if the current character is a space. If this is true
-it scans everything until newline and returns the data as a `Text` token (which
-is the "no special highlighting" token).
-
-If this rule doesn't match, it checks if the current char is a plus sign. And
-so on.
-
-If no rule matches at the current position, the current char is emitted as an
-`Error` token that indicates a lexing error, and the position is increased by
-one.
-
-
-Adding and testing a new lexer
-==============================
-
-The easiest way to use a new lexer is to use Pygments' support for loading
-the lexer from a file relative to your current directory.
-
-First, change the name of your lexer class to CustomLexer:
-
-.. code-block:: python
-
-    from pygments.lexer import RegexLexer
-    from pygments.token import *
-
-    class CustomLexer(RegexLexer):
-        """All your lexer code goes here!"""
-
-Then you can load the lexer from the command line with the additional
-flag ``-x``:
-
-.. code-block:: console
-
-    $ pygmentize -l your_lexer_file.py -x
-
-To specify a class name other than CustomLexer, append it with a colon:
-
-.. code-block:: console
-
-    $ pygmentize -l your_lexer.py:SomeLexer -x
-
-Or, using the Python API:
-
-.. code-block:: python
-
-    # For a lexer named CustomLexer
-    your_lexer = load_lexer_from_file(filename, **options)
-
-    # For a lexer named MyNewLexer
-    your_named_lexer = load_lexer_from_file(filename, "MyNewLexer", **options)
-
-When loading custom lexers and formatters, be extremely careful to use only
-trusted files; Pygments will perform the equivalent of ``eval`` on them.
-
-If you only want to use your lexer with the Pygments API, you can import and
-instantiate the lexer yourself, then pass it to :func:`pygments.highlight`.
-
-To prepare your new lexer for inclusion in the Pygments distribution, so that it
-will be found when passing filenames or lexer aliases from the command line, you
-have to perform the following steps.
-
-First, change to the current directory containing the Pygments source code. You
-will need to have either an unpacked source tarball, or (preferably) a copy
-cloned from BitBucket.
-
-.. code-block:: console
-
-    $ cd .../pygments-main
-
-Select a matching module under ``pygments/lexers``, or create a new module for
-your lexer class.
-
-Next, make sure the lexer is known from outside of the module. All modules in
-the ``pygments.lexers`` package specify ``__all__``. For example,
-``esoteric.py`` sets::
-
-    __all__ = ['BrainfuckLexer', 'BefungeLexer', ...]
-
-Add the name of your lexer class to this list (or create the list if your lexer
-is the only class in the module).
-
-Finally the lexer can be made publicly known by rebuilding the lexer mapping:
-
-.. code-block:: console
-
-    $ make mapfiles
-
-To test the new lexer, store an example file with the proper extension in
-``tests/examplefiles``. For example, to test your ``DiffLexer``, add a
-``tests/examplefiles/example.diff`` containing a sample diff output.
-
-Now you can use pygmentize to render your example to HTML:
-
-.. code-block:: console
-
-    $ ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff
-
-Note that this explicitly calls the ``pygmentize`` in the current directory
-by preceding it with ``./``. This ensures your modifications are used.
-Otherwise a possibly already installed, unmodified version without your new
-lexer would have been called from the system search path (``$PATH``).
-
-To view the result, open ``/tmp/example.html`` in your browser.
-
-Once the example renders as expected, you should run the complete test suite:
-
-.. code-block:: console
-
-    $ make test
-
-It also tests that your lexer fulfills the lexer API and certain invariants,
-such as that the concatenation of all token text is the same as the input text.
-
-
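
The `load_lexer_from_file()` calls quoted just above omit their import. A freestanding sketch with the imports spelled out (the helper lives in `pygments.lexers` from Pygments 2.2 on; `mylexer.py` is a hypothetical file defining a `CustomLexer` class):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import load_lexer_from_file

    # By default the class named CustomLexer is loaded from the file...
    lexer = load_lexer_from_file('mylexer.py')
    # ...or name another class explicitly, as with `-x` on the command line.
    other = load_lexer_from_file('mylexer.py', 'MyNewLexer')

    print(highlight('some input text', lexer, HtmlFormatter()))
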
-Regex Flags
-===========
-
-You can either define regex flags locally in the regex (``r'(?x)foo bar'``) or
-globally by adding a `flags` attribute to your lexer class. If no attribute is
-defined, it defaults to `re.MULTILINE`. For more information about regular
-expression flags see the page about `regular expressions`_ in the Python
-documentation.
-
-.. _regular expressions: http://docs.python.org/library/re.html#regular-expression-syntax
-
-
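
A freestanding sketch of the two flag mechanisms described above (a hypothetical lexer, not taken from the removed file): one rule relies on the class-wide `flags` attribute, another sets its flag locally with `(?i)`:

    import re
    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Name, Text

    class FlagsDemoLexer(RegexLexer):
        name = 'FlagsDemo'
        flags = re.MULTILINE | re.IGNORECASE  # applied to every rule below

        tokens = {
            'root': [
                (r'^select\b', Keyword),   # case-insensitive via `flags`
                (r'(?i)from\b', Keyword),  # flag set inside the regex itself
                (r'\s+', Text),
                (r'\S+', Name),
            ],
        }
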
-Scanning multiple tokens at once
-================================
-
-So far, the `action` element in the rule tuple of regex, action and state has
-been a single token type. Now we look at the first of several other possible
-values.
-
-Here is a more complex lexer that highlights INI files. INI files consist of
-sections, comments and ``key = value`` pairs::
-
-    from pygments.lexer import RegexLexer, bygroups
-    from pygments.token import *
-
-    class IniLexer(RegexLexer):
-        name = 'INI'
-        aliases = ['ini', 'cfg']
-        filenames = ['*.ini', '*.cfg']
-
-        tokens = {
-            'root': [
-                (r'\s+', Text),
-                (r';.*?$', Comment),
-                (r'\[.*?\]$', Keyword),
-                (r'(.*?)(\s*)(=)(\s*)(.*?)$',
-                 bygroups(Name.Attribute, Text, Operator, Text, String))
-            ]
-        }
-
-The lexer first looks for whitespace, comments and section names. Later it
-looks for a line that looks like a key, value pair, separated by an ``'='``
-sign, and optional whitespace.
-
-The `bygroups` helper yields each capturing group in the regex with a different
-token type. First the `Name.Attribute` token, then a `Text` token for the
-optional whitespace, after that a `Operator` token for the equals sign. Then a
-`Text` token for the whitespace again. The rest of the line is returned as
-`String`.
-
-Note that for this to work, every part of the match must be inside a capturing
-group (a ``(...)``), and there must not be any nested capturing groups. If you
-nevertheless need a group, use a non-capturing group defined using this syntax:
-``(?:some|words|here)`` (note the ``?:`` after the beginning parenthesis).
-
-If you find yourself needing a capturing group inside the regex which shouldn't
-be part of the output but is used in the regular expressions for backreferencing
-(eg: ``r'(<(foo|bar)>)(.*?)(</\2>)'``), you can pass `None` to the bygroups
-function and that group will be skipped in the output.
-
-
-Changing states
-===============
-
-Many lexers need multiple states to work as expected. For example, some
-languages allow multiline comments to be nested. Since this is a recursive
-pattern it's impossible to lex just using regular expressions.
-
-Here is a lexer that recognizes C++ style comments (multi-line with ``/* */``
-and single-line with ``//`` until end of line)::
-
-    from pygments.lexer import RegexLexer
-    from pygments.token import *
-
-    class CppCommentLexer(RegexLexer):
-        name = 'Example Lexer with states'
-
-        tokens = {
-            'root': [
-                (r'[^/]+', Text),
-                (r'/\*', Comment.Multiline, 'comment'),
-                (r'//.*?$', Comment.Singleline),
-                (r'/', Text)
-            ],
-            'comment': [
-                (r'[^*/]', Comment.Multiline),
-                (r'/\*', Comment.Multiline, '#push'),
-                (r'\*/', Comment.Multiline, '#pop'),
-                (r'[*/]', Comment.Multiline)
-            ]
-        }
-
-This lexer starts lexing in the ``'root'`` state. It tries to match as much as
-possible until it finds a slash (``'/'``). If the next character after the slash
-is an asterisk (``'*'``) the `RegexLexer` sends those two characters to the
-output stream marked as `Comment.Multiline` and continues lexing with the rules
-defined in the ``'comment'`` state.
-
-If there wasn't an asterisk after the slash, the `RegexLexer` checks if it's a
-Singleline comment (i.e. followed by a second slash). If this also wasn't the
-case it must be a single slash, which is not a comment starter (the separate
-regex for a single slash must also be given, else the slash would be marked as
-an error token).
-
-Inside the ``'comment'`` state, we do the same thing again. Scan until the
-lexer finds a star or slash. If it's the opening of a multiline comment, push
-the ``'comment'`` state on the stack and continue scanning, again in the
-``'comment'`` state. Else, check if it's the end of the multiline comment. If
-yes, pop one state from the stack.
-
-Note: If you pop from an empty stack you'll get an `IndexError`. (There is an
-easy way to prevent this from happening: don't ``'#pop'`` in the root state).
-
-If the `RegexLexer` encounters a newline that is flagged as an error token, the
-stack is emptied and the lexer continues scanning in the ``'root'`` state. This
-can help producing error-tolerant highlighting for erroneous input, e.g. when a
-single-line string is not closed.
-
-
-Advanced state tricks
-=====================
-
-There are a few more things you can do with states:
-
-- You can push multiple states onto the stack if you give a tuple instead of a
-  simple string as the third item in a rule tuple. For example, if you want to
-  match a comment containing a directive, something like:
-
-  .. code-block:: text
-
-      /* <processing directive> rest of comment */
-
-  you can use this rule::
-
-      tokens = {
-          'root': [
-              (r'/\* <', Comment, ('comment', 'directive')),
-              ...
-          ],
-          'directive': [
-              (r'[^>]*', Comment.Directive),
-              (r'>', Comment, '#pop'),
-          ],
-          'comment': [
-              (r'[^*]+', Comment),
-              (r'\*/', Comment, '#pop'),
-              (r'\*', Comment),
-          ]
-      }
-
-  When this encounters the above sample, first ``'comment'`` and ``'directive'``
-  are pushed onto the stack, then the lexer continues in the directive state
-  until it finds the closing ``>``, then it continues in the comment state until
-  the closing ``*/``. Then, both states are popped from the stack again and
-  lexing continues in the root state.
-
-  .. versionadded:: 0.9
-     The tuple can contain the special ``'#push'`` and ``'#pop'`` (but not
-     ``'#pop:n'``) directives.
-
-
-- You can include the rules of a state in the definition of another. This is
-  done by using `include` from `pygments.lexer`::
-
-      from pygments.lexer import RegexLexer, bygroups, include
-      from pygments.token import *
-
-      class ExampleLexer(RegexLexer):
-          tokens = {
-              'comments': [
-                  (r'/\*.*?\*/', Comment),
-                  (r'//.*?\n', Comment),
-              ],
-              'root': [
-                  include('comments'),
-                  (r'(function )(\w+)( {)',
-                   bygroups(Keyword, Name, Keyword), 'function'),
-                  (r'.', Text),
-              ],
-              'function': [
-                  (r'[^}/]+', Text),
-                  include('comments'),
-                  (r'/', Text),
-                  (r'\}', Keyword, '#pop'),
-              ]
-          }
-
-  This is a hypothetical lexer for a language that consist of functions and
-  comments. Because comments can occur at toplevel and in functions, we need
-  rules for comments in both states. As you can see, the `include` helper saves
-  repeating rules that occur more than once (in this example, the state
-  ``'comment'`` will never be entered by the lexer, as it's only there to be
-  included in ``'root'`` and ``'function'``).
-
-- Sometimes, you may want to "combine" a state from existing ones. This is
-  possible with the `combined` helper from `pygments.lexer`.
-
-  If you, instead of a new state, write ``combined('state1', 'state2')`` as the
-  third item of a rule tuple, a new anonymous state will be formed from state1
-  and state2 and if the rule matches, the lexer will enter this state.
-
-  This is not used very often, but can be helpful in some cases, such as the
-  `PythonLexer`'s string literal processing.
-
-- If you want your lexer to start lexing in a different state you can modify the
-  stack by overriding the `get_tokens_unprocessed()` method::
-
-      from pygments.lexer import RegexLexer
-
-      class ExampleLexer(RegexLexer):
-          tokens = {...}
-
-          def get_tokens_unprocessed(self, text, stack=('root', 'otherstate')):
-              for item in RegexLexer.get_tokens_unprocessed(self, text, stack):
-                  yield item
-
-  Some lexers like the `PhpLexer` use this to make the leading ``<?php``
-  preprocessor comments optional. Note that you can crash the lexer easily by
-  putting values into the stack that don't exist in the token map. Also
-  removing ``'root'`` from the stack can result in strange errors!
-
-- In some lexers, a state should be popped if anything is encountered that isn't
-  matched by a rule in the state. You could use an empty regex at the end of
-  the state list, but Pygments provides a more obvious way of spelling that:
-  ``default('#pop')`` is equivalent to ``('', Text, '#pop')``.
-
-  .. versionadded:: 2.0
-
-
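
A freestanding sketch of the `default` shorthand from the last item above (a hypothetical lexer; `default` is importable from `pygments.lexer` since Pygments 2.0). The 'value' state pops itself as soon as none of its rules match, without consuming a character:

    from pygments.lexer import RegexLexer, default
    from pygments.token import Name, Operator, String, Text

    class DefaultDemoLexer(RegexLexer):
        name = 'DefaultDemo'

        tokens = {
            'root': [
                (r'\w+', Name),
                (r'=', Operator, 'value'),
                (r'\s+', Text),
            ],
            'value': [
                (r'\s+', Text),
                (r'"[^"]*"', String),
                default('#pop'),  # anything else: leave 'value' quietly
            ],
        }
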
-Subclassing lexers derived from RegexLexer
-==========================================
-
-.. versionadded:: 1.6
-
-Sometimes multiple languages are very similar, but should still be lexed by
-different lexer classes.
-
-When subclassing a lexer derived from RegexLexer, the ``tokens`` dictionaries
-defined in the parent and child class are merged. For example::
-
-    from pygments.lexer import RegexLexer, inherit
-    from pygments.token import *
-
-    class BaseLexer(RegexLexer):
-        tokens = {
-            'root': [
-                ('[a-z]+', Name),
-                (r'/\*', Comment, 'comment'),
-                ('"', String, 'string'),
-                ('\s+', Text),
-            ],
-            'string': [
-                ('[^"]+', String),
-                ('"', String, '#pop'),
-            ],
-            'comment': [
-                ...
-            ],
-        }
-
-    class DerivedLexer(BaseLexer):
-        tokens = {
-            'root': [
-                ('[0-9]+', Number),
-                inherit,
-            ],
-            'string': [
-                (r'[^"\\]+', String),
-                (r'\\.', String.Escape),
-                ('"', String, '#pop'),
-            ],
-        }
-
-The `BaseLexer` defines two states, lexing names and strings. The
-`DerivedLexer` defines its own tokens dictionary, which extends the definitions
-of the base lexer:
-
-* The "root" state has an additional rule and then the special object `inherit`,
-  which tells Pygments to insert the token definitions of the parent class at
-  that point.
-
-* The "string" state is replaced entirely, since there is not `inherit` rule.
-
-* The "comment" state is inherited entirely.
-
-
-Using multiple lexers
-=====================
-
-Using multiple lexers for the same input can be tricky. One of the easiest
-combination techniques is shown here: You can replace the action entry in a rule
-tuple with a lexer class. The matched text will then be lexed with that lexer,
-and the resulting tokens will be yielded.
-
-For example, look at this stripped-down HTML lexer::
-
-    from pygments.lexer import RegexLexer, bygroups, using
-    from pygments.token import *
-    from pygments.lexers.javascript import JavascriptLexer
-
-    class HtmlLexer(RegexLexer):
-        name = 'HTML'
-        aliases = ['html']
-        filenames = ['*.html', '*.htm']
-
-        flags = re.IGNORECASE | re.DOTALL
-        tokens = {
-            'root': [
-                ('[^<&]+', Text),
-                ('&.*?;', Name.Entity),
-                (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
-                (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
-                (r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
-            ],
-            'script-content': [
-                (r'(.+?)(<\s*/\s*script\s*>)',
-                 bygroups(using(JavascriptLexer), Name.Tag),
-                 '#pop'),
-            ]
-        }
-
-Here the content of a ``<script>`` tag is passed to a newly created instance of
-a `JavascriptLexer` and not processed by the `HtmlLexer`. This is done using
-the `using` helper that takes the other lexer class as its parameter.
-
-Note the combination of `bygroups` and `using`. This makes sure that the
-content up to the ``</script>`` end tag is processed by the `JavascriptLexer`,
-while the end tag is yielded as a normal token with the `Name.Tag` type.
-
-Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule.
-Here, two states are pushed onto the state stack, ``'script-content'`` and
-``'tag'``. That means that first ``'tag'`` is processed, which will lex
-attributes and the closing ``>``, then the ``'tag'`` state is popped and the
-next state on top of the stack will be ``'script-content'``.
-
-Since you cannot refer to the class currently being defined, use `this`
-(imported from `pygments.lexer`) to refer to the current lexer class, i.e.
-``using(this)``. This construct may seem unnecessary, but this is often the
-most obvious way of lexing arbitrary syntax between fixed delimiters without
-introducing deeply nested states.
-
-The `using()` helper has a special keyword argument, `state`, which works as
-follows: if given, the lexer to use initially is not in the ``"root"`` state,
-but in the state given by this argument. This does not work with advanced
-`RegexLexer` subclasses such as `ExtendedRegexLexer` (see below).
-
-Any other keywords arguments passed to `using()` are added to the keyword
-arguments used to create the lexer.
-
-
-Delegating Lexer
-================
-
-Another approach for nested lexers is the `DelegatingLexer` which is for example
-used for the template engine lexers. It takes two lexers as arguments on
-initialisation: a `root_lexer` and a `language_lexer`.
-
-The input is processed as follows: First, the whole text is lexed with the
-`language_lexer`. All tokens yielded with the special type of ``Other`` are
-then concatenated and given to the `root_lexer`. The language tokens of the
-`language_lexer` are then inserted into the `root_lexer`'s token stream at the
-appropriate positions. ::
-
-    from pygments.lexer import DelegatingLexer
-    from pygments.lexers.web import HtmlLexer, PhpLexer
-
-    class HtmlPhpLexer(DelegatingLexer):
-        def __init__(self, **options):
-            super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
-
-This procedure ensures that e.g. HTML with template tags in it is highlighted
-correctly even if the template tags are put into HTML tags or attributes.
-
-If you want to change the needle token ``Other`` to something else, you can give
-the lexer another token type as the third parameter::
-
-    DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options)
-
-
-Callbacks
-=========
-
-Sometimes the grammar of a language is so complex that a lexer would be unable
-to process it just by using regular expressions and stacks.
-
-For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead
-of token types (`bygroups` and `using` are nothing else but preimplemented
-callbacks). The callback must be a function taking two arguments:
-
-* the lexer itself
-* the match object for the last matched rule
-
-The callback must then return an iterable of (or simply yield) ``(index,
-tokentype, value)`` tuples, which are then just passed through by
-`get_tokens_unprocessed()`. The ``index`` here is the position of the token in
-the input string, ``tokentype`` is the normal token type (like `Name.Builtin`),
-and ``value`` the associated part of the input string.
-
-You can see an example here::
-
-    from pygments.lexer import RegexLexer
-    from pygments.token import Generic
-
-    class HypotheticLexer(RegexLexer):
-
-        def headline_callback(lexer, match):
-            equal_signs = match.group(1)
-            text = match.group(2)
-            yield match.start(), Generic.Headline, equal_signs + text + equal_signs
-
-        tokens = {
-            'root': [
-                (r'(=+)(.*?)(\1)', headline_callback)
-            ]
-        }
-
-If the regex for the `headline_callback` matches, the function is called with
-the match object. Note that after the callback is done, processing continues
-normally, that is, after the end of the previous match. The callback has no
-possibility to influence the position.
-
-There are not really any simple examples for lexer callbacks, but you can see
-them in action e.g. in the `SMLLexer` class in `ml.py`_.
-
-.. _ml.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ml.py
-
-
-The ExtendedRegexLexer class
-============================
-
-The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for
-the funky syntax rules of languages such as Ruby.
-
-But fear not; even then you don't have to abandon the regular expression
-approach: Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`.
-All features known from RegexLexers are available here too, and the tokens are
-specified in exactly the same way, *except* for one detail:
-
-The `get_tokens_unprocessed()` method holds its internal state data not as local
-variables, but in an instance of the `pygments.lexer.LexerContext` class, and
-that instance is passed to callbacks as a third argument. This means that you
-can modify the lexer state in callbacks.
-
-The `LexerContext` class has the following members:
-
-* `text` -- the input text
-* `pos` -- the current starting position that is used for matching regexes
-* `stack` -- a list containing the state stack
-* `end` -- the maximum position to which regexes are matched, this defaults to
-  the length of `text`
-
-Additionally, the `get_tokens_unprocessed()` method can be given a
-`LexerContext` instead of a string and will then process this context instead of
-creating a new one for the string argument.
-
-Note that because you can set the current position to anything in the callback,
-it won't be automatically be set by the caller after the callback is finished.
-For example, this is how the hypothetical lexer above would be written with the
-`ExtendedRegexLexer`::
-
-    from pygments.lexer import ExtendedRegexLexer
-    from pygments.token import Generic
-
-    class ExHypotheticLexer(ExtendedRegexLexer):
-
-        def headline_callback(lexer, match, ctx):
-            equal_signs = match.group(1)
-            text = match.group(2)
-            yield match.start(), Generic.Headline, equal_signs + text + equal_signs
-            ctx.pos = match.end()
-
-        tokens = {
-            'root': [
-                (r'(=+)(.*?)(\1)', headline_callback)
-            ]
-        }
-
-This might sound confusing (and it can really be). But it is needed, and for an
-example look at the Ruby lexer in `ruby.py`_.
-
-.. _ruby.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ruby.py
-
-
-Handling Lists of Keywords
-==========================
-
-For a relatively short list (hundreds) you can construct an optimized regular
-expression directly using ``words()`` (longer lists, see next section). This
-function handles a few things for you automatically, including escaping
-metacharacters and Python's first-match rather than longest-match in
-alternations. Feel free to put the lists themselves in
-``pygments/lexers/_$lang_builtins.py`` (see examples there), and generated by
-code if possible.
-
-An example of using ``words()`` is something like::
-
-    from pygments.lexer import RegexLexer, words, Name
-
-    class MyLexer(RegexLexer):
-
-        tokens = {
-            'root': [
-                (words(('else', 'elseif'), suffix=r'\b'), Name.Builtin),
-                (r'\w+', Name),
-            ],
-        }
-
-As you can see, you can add ``prefix`` and ``suffix`` parts to the constructed
-regex.
-
-
-Modifying Token Streams
-=======================
-
-Some languages ship a lot of builtin functions (for example PHP). The total
-amount of those functions differs from system to system because not everybody
-has every extension installed. In the case of PHP there are over 3000 builtin
-functions. That's an incredibly huge amount of functions, much more than you
-want to put into a regular expression.
-
-But because only `Name` tokens can be function names this is solvable by
-overriding the ``get_tokens_unprocessed()`` method. The following lexer
-subclasses the `PythonLexer` so that it highlights some additional names as
-pseudo keywords::
-
-    from pygments.lexers.python import PythonLexer
-    from pygments.token import Name, Keyword
-
-    class MyPythonLexer(PythonLexer):
-        EXTRA_KEYWORDS = set(('foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs'))
-
-        def get_tokens_unprocessed(self, text):
-            for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
-                if token is Name and value in self.EXTRA_KEYWORDS:
-                    yield index, Keyword.Pseudo, value
-                else:
-                    yield index, token, value
-
-The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions.