pygments.rb 0.6.3 → 1.0.0
This diff represents the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as published in their public registry.
- checksums.yaml +7 -0
- data/CHANGELOG.md +11 -0
- data/README.md +6 -6
- data/Rakefile +2 -1
- data/cache-lexers.rb +1 -1
- data/circle.yml +7 -0
- data/lib/pygments/lexer.rb +3 -3
- data/lib/pygments/popen.rb +67 -30
- data/lib/pygments/version.rb +1 -1
- data/pygments.rb.gemspec +2 -1
- data/test/test_pygments.rb +16 -16
- data/vendor/pygments-main/AUTHORS +54 -8
- data/vendor/pygments-main/CHANGES +247 -25
- data/vendor/pygments-main/LICENSE +1 -1
- data/vendor/pygments-main/Makefile +15 -6
- data/vendor/pygments-main/README.rst +39 -0
- data/vendor/pygments-main/REVISION +1 -1
- data/vendor/pygments-main/TODO +0 -3
- data/vendor/pygments-main/doc/_themes/pygments14/layout.html +1 -1
- data/vendor/pygments-main/doc/_themes/pygments14/static/pygments14.css_t +1 -1
- data/vendor/pygments-main/doc/conf.py +3 -11
- data/vendor/pygments-main/doc/docs/api.rst +15 -0
- data/vendor/pygments-main/doc/docs/cmdline.rst +6 -2
- data/vendor/pygments-main/doc/docs/filterdevelopment.rst +4 -3
- data/vendor/pygments-main/doc/docs/integrate.rst +11 -15
- data/vendor/pygments-main/doc/docs/java.rst +7 -7
- data/vendor/pygments-main/doc/docs/lexerdevelopment.rst +258 -171
- data/vendor/pygments-main/doc/docs/lexers.rst +2 -2
- data/vendor/pygments-main/doc/docs/styles.rst +58 -0
- data/vendor/pygments-main/doc/docs/tokens.rst +22 -2
- data/vendor/pygments-main/doc/docs/unicode.rst +15 -7
- data/vendor/pygments-main/doc/faq.rst +17 -21
- data/vendor/pygments-main/doc/index.rst +12 -11
- data/vendor/pygments-main/doc/languages.rst +10 -7
- data/vendor/pygments-main/external/autopygmentize +9 -6
- data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +70 -52
- data/vendor/pygments-main/external/markdown-processor.py +1 -1
- data/vendor/pygments-main/external/moin-parser.py +1 -1
- data/vendor/pygments-main/external/rst-directive.py +1 -1
- data/vendor/pygments-main/pygmentize +2 -1
- data/vendor/pygments-main/pygments/__init__.py +14 -15
- data/vendor/pygments-main/pygments/cmdline.py +188 -113
- data/vendor/pygments-main/pygments/console.py +13 -13
- data/vendor/pygments-main/pygments/filter.py +7 -7
- data/vendor/pygments-main/pygments/filters/__init__.py +24 -32
- data/vendor/pygments-main/pygments/formatter.py +5 -5
- data/vendor/pygments-main/pygments/formatters/__init__.py +92 -44
- data/vendor/pygments-main/pygments/formatters/_mapping.py +51 -69
- data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -1
- data/vendor/pygments-main/pygments/formatters/html.py +63 -51
- data/vendor/pygments-main/pygments/formatters/img.py +25 -22
- data/vendor/pygments-main/pygments/formatters/irc.py +182 -0
- data/vendor/pygments-main/pygments/formatters/latex.py +34 -22
- data/vendor/pygments-main/pygments/formatters/other.py +5 -7
- data/vendor/pygments-main/pygments/formatters/rtf.py +28 -31
- data/vendor/pygments-main/pygments/formatters/svg.py +1 -2
- data/vendor/pygments-main/pygments/formatters/terminal.py +29 -45
- data/vendor/pygments-main/pygments/formatters/terminal256.py +118 -31
- data/vendor/pygments-main/pygments/lexer.py +120 -34
- data/vendor/pygments-main/pygments/lexers/__init__.py +85 -53
- data/vendor/pygments-main/pygments/lexers/{_asybuiltins.py → _asy_builtins.py} +7 -7
- data/vendor/pygments-main/pygments/lexers/{_clbuiltins.py → _cl_builtins.py} +17 -17
- data/vendor/pygments-main/pygments/lexers/_cocoa_builtins.py +72 -0
- data/vendor/pygments-main/pygments/lexers/_csound_builtins.py +1346 -0
- data/vendor/pygments-main/pygments/lexers/{_lassobuiltins.py → _lasso_builtins.py} +4699 -4561
- data/vendor/pygments-main/pygments/lexers/{_luabuiltins.py → _lua_builtins.py} +91 -51
- data/vendor/pygments-main/pygments/lexers/_mapping.py +342 -242
- data/vendor/pygments-main/pygments/lexers/_mql_builtins.py +1172 -0
- data/vendor/pygments-main/pygments/lexers/_openedge_builtins.py +2547 -0
- data/vendor/pygments-main/pygments/lexers/{_phpbuiltins.py → _php_builtins.py} +350 -353
- data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +600 -212
- data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +3084 -30
- data/vendor/pygments-main/pygments/lexers/_sourcemod_builtins.py +1163 -0
- data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +91 -13
- data/vendor/pygments-main/pygments/lexers/_stata_builtins.py +419 -0
- data/vendor/pygments-main/pygments/lexers/_tsql_builtins.py +1004 -0
- data/vendor/pygments-main/pygments/lexers/_vim_builtins.py +1939 -0
- data/vendor/pygments-main/pygments/lexers/actionscript.py +240 -0
- data/vendor/pygments-main/pygments/lexers/agile.py +14 -2542
- data/vendor/pygments-main/pygments/lexers/algebra.py +221 -0
- data/vendor/pygments-main/pygments/lexers/ambient.py +76 -0
- data/vendor/pygments-main/pygments/lexers/ampl.py +87 -0
- data/vendor/pygments-main/pygments/lexers/apl.py +101 -0
- data/vendor/pygments-main/pygments/lexers/archetype.py +318 -0
- data/vendor/pygments-main/pygments/lexers/asm.py +276 -81
- data/vendor/pygments-main/pygments/lexers/automation.py +374 -0
- data/vendor/pygments-main/pygments/lexers/basic.py +500 -0
- data/vendor/pygments-main/pygments/lexers/bibtex.py +156 -0
- data/vendor/pygments-main/pygments/lexers/business.py +594 -0
- data/vendor/pygments-main/pygments/lexers/c_cpp.py +252 -0
- data/vendor/pygments-main/pygments/lexers/c_like.py +541 -0
- data/vendor/pygments-main/pygments/lexers/capnproto.py +79 -0
- data/vendor/pygments-main/pygments/lexers/chapel.py +101 -0
- data/vendor/pygments-main/pygments/lexers/clean.py +288 -0
- data/vendor/pygments-main/pygments/lexers/compiled.py +24 -5182
- data/vendor/pygments-main/pygments/lexers/configs.py +833 -0
- data/vendor/pygments-main/pygments/lexers/console.py +114 -0
- data/vendor/pygments-main/pygments/lexers/crystal.py +384 -0
- data/vendor/pygments-main/pygments/lexers/csound.py +366 -0
- data/vendor/pygments-main/pygments/lexers/css.py +689 -0
- data/vendor/pygments-main/pygments/lexers/d.py +251 -0
- data/vendor/pygments-main/pygments/lexers/dalvik.py +6 -6
- data/vendor/pygments-main/pygments/lexers/data.py +555 -0
- data/vendor/pygments-main/pygments/lexers/diff.py +165 -0
- data/vendor/pygments-main/pygments/lexers/dotnet.py +96 -88
- data/vendor/pygments-main/pygments/lexers/dsls.py +878 -0
- data/vendor/pygments-main/pygments/lexers/dylan.py +289 -0
- data/vendor/pygments-main/pygments/lexers/ecl.py +125 -0
- data/vendor/pygments-main/pygments/lexers/eiffel.py +65 -0
- data/vendor/pygments-main/pygments/lexers/elm.py +121 -0
- data/vendor/pygments-main/pygments/lexers/erlang.py +533 -0
- data/vendor/pygments-main/pygments/lexers/esoteric.py +275 -0
- data/vendor/pygments-main/pygments/lexers/ezhil.py +68 -0
- data/vendor/pygments-main/pygments/lexers/factor.py +344 -0
- data/vendor/pygments-main/pygments/lexers/fantom.py +250 -0
- data/vendor/pygments-main/pygments/lexers/felix.py +273 -0
- data/vendor/pygments-main/pygments/lexers/forth.py +177 -0
- data/vendor/pygments-main/pygments/lexers/fortran.py +205 -0
- data/vendor/pygments-main/pygments/lexers/foxpro.py +1 -1
- data/vendor/pygments-main/pygments/lexers/functional.py +11 -3661
- data/vendor/pygments-main/pygments/lexers/go.py +101 -0
- data/vendor/pygments-main/pygments/lexers/grammar_notation.py +213 -0
- data/vendor/pygments-main/pygments/lexers/graph.py +7 -8
- data/vendor/pygments-main/pygments/lexers/graphics.py +553 -0
- data/vendor/pygments-main/pygments/lexers/haskell.py +840 -0
- data/vendor/pygments-main/pygments/lexers/haxe.py +936 -0
- data/vendor/pygments-main/pygments/lexers/hdl.py +172 -145
- data/vendor/pygments-main/pygments/lexers/hexdump.py +97 -0
- data/vendor/pygments-main/pygments/lexers/html.py +602 -0
- data/vendor/pygments-main/pygments/lexers/idl.py +270 -0
- data/vendor/pygments-main/pygments/lexers/igor.py +288 -0
- data/vendor/pygments-main/pygments/lexers/inferno.py +3 -3
- data/vendor/pygments-main/pygments/lexers/installers.py +322 -0
- data/vendor/pygments-main/pygments/lexers/int_fiction.py +1343 -0
- data/vendor/pygments-main/pygments/lexers/iolang.py +63 -0
- data/vendor/pygments-main/pygments/lexers/j.py +146 -0
- data/vendor/pygments-main/pygments/lexers/javascript.py +1506 -0
- data/vendor/pygments-main/pygments/lexers/julia.py +333 -0
- data/vendor/pygments-main/pygments/lexers/jvm.py +232 -186
- data/vendor/pygments-main/pygments/lexers/lisp.py +2621 -0
- data/vendor/pygments-main/pygments/lexers/make.py +202 -0
- data/vendor/pygments-main/pygments/lexers/markup.py +595 -0
- data/vendor/pygments-main/pygments/lexers/math.py +11 -2276
- data/vendor/pygments-main/pygments/lexers/matlab.py +663 -0
- data/vendor/pygments-main/pygments/lexers/ml.py +769 -0
- data/vendor/pygments-main/pygments/lexers/modeling.py +358 -0
- data/vendor/pygments-main/pygments/lexers/modula2.py +1561 -0
- data/vendor/pygments-main/pygments/lexers/monte.py +203 -0
- data/vendor/pygments-main/pygments/lexers/ncl.py +1053 -0
- data/vendor/pygments-main/pygments/lexers/nimrod.py +159 -0
- data/vendor/pygments-main/pygments/lexers/nit.py +64 -0
- data/vendor/pygments-main/pygments/lexers/nix.py +136 -0
- data/vendor/pygments-main/pygments/lexers/oberon.py +105 -0
- data/vendor/pygments-main/pygments/lexers/objective.py +504 -0
- data/vendor/pygments-main/pygments/lexers/ooc.py +85 -0
- data/vendor/pygments-main/pygments/lexers/other.py +30 -4481
- data/vendor/pygments-main/pygments/lexers/parasail.py +79 -0
- data/vendor/pygments-main/pygments/lexers/parsers.py +171 -114
- data/vendor/pygments-main/pygments/lexers/pascal.py +644 -0
- data/vendor/pygments-main/pygments/lexers/pawn.py +199 -0
- data/vendor/pygments-main/pygments/lexers/perl.py +616 -0
- data/vendor/pygments-main/pygments/lexers/php.py +267 -0
- data/vendor/pygments-main/pygments/lexers/praat.py +294 -0
- data/vendor/pygments-main/pygments/lexers/prolog.py +306 -0
- data/vendor/pygments-main/pygments/lexers/python.py +938 -0
- data/vendor/pygments-main/pygments/lexers/qvt.py +152 -0
- data/vendor/pygments-main/pygments/lexers/r.py +453 -0
- data/vendor/pygments-main/pygments/lexers/rdf.py +195 -24
- data/vendor/pygments-main/pygments/lexers/rebol.py +431 -0
- data/vendor/pygments-main/pygments/lexers/resource.py +85 -0
- data/vendor/pygments-main/pygments/lexers/rnc.py +67 -0
- data/vendor/pygments-main/pygments/lexers/roboconf.py +82 -0
- data/vendor/pygments-main/pygments/lexers/{_robotframeworklexer.py → robotframework.py} +20 -18
- data/vendor/pygments-main/pygments/lexers/ruby.py +519 -0
- data/vendor/pygments-main/pygments/lexers/rust.py +209 -0
- data/vendor/pygments-main/pygments/lexers/sas.py +228 -0
- data/vendor/pygments-main/pygments/lexers/scripting.py +1222 -0
- data/vendor/pygments-main/pygments/lexers/shell.py +478 -115
- data/vendor/pygments-main/pygments/lexers/smalltalk.py +195 -0
- data/vendor/pygments-main/pygments/lexers/smv.py +75 -0
- data/vendor/pygments-main/pygments/lexers/snobol.py +83 -0
- data/vendor/pygments-main/pygments/lexers/special.py +6 -2
- data/vendor/pygments-main/pygments/lexers/sql.py +209 -120
- data/vendor/pygments-main/pygments/lexers/stata.py +106 -0
- data/vendor/pygments-main/pygments/lexers/supercollider.py +90 -0
- data/vendor/pygments-main/pygments/lexers/tcl.py +145 -0
- data/vendor/pygments-main/pygments/lexers/templates.py +282 -90
- data/vendor/pygments-main/pygments/lexers/testing.py +207 -0
- data/vendor/pygments-main/pygments/lexers/text.py +15 -2045
- data/vendor/pygments-main/pygments/lexers/textedit.py +169 -0
- data/vendor/pygments-main/pygments/lexers/textfmts.py +297 -0
- data/vendor/pygments-main/pygments/lexers/theorem.py +458 -0
- data/vendor/pygments-main/pygments/lexers/trafficscript.py +54 -0
- data/vendor/pygments-main/pygments/lexers/typoscript.py +225 -0
- data/vendor/pygments-main/pygments/lexers/urbi.py +133 -0
- data/vendor/pygments-main/pygments/lexers/varnish.py +190 -0
- data/vendor/pygments-main/pygments/lexers/verification.py +111 -0
- data/vendor/pygments-main/pygments/lexers/web.py +13 -4499
- data/vendor/pygments-main/pygments/lexers/webmisc.py +979 -0
- data/vendor/pygments-main/pygments/lexers/whiley.py +117 -0
- data/vendor/pygments-main/pygments/lexers/x10.py +69 -0
- data/vendor/pygments-main/pygments/modeline.py +5 -2
- data/vendor/pygments-main/pygments/plugin.py +1 -1
- data/vendor/pygments-main/pygments/regexopt.py +92 -0
- data/vendor/pygments-main/pygments/scanner.py +3 -2
- data/vendor/pygments-main/pygments/sphinxext.py +11 -6
- data/vendor/pygments-main/pygments/style.py +41 -4
- data/vendor/pygments-main/pygments/styles/__init__.py +5 -1
- data/vendor/pygments-main/pygments/styles/algol.py +63 -0
- data/vendor/pygments-main/pygments/styles/algol_nu.py +63 -0
- data/vendor/pygments-main/pygments/styles/arduino.py +98 -0
- data/vendor/pygments-main/pygments/styles/autumn.py +1 -1
- data/vendor/pygments-main/pygments/styles/borland.py +1 -1
- data/vendor/pygments-main/pygments/styles/bw.py +1 -1
- data/vendor/pygments-main/pygments/styles/colorful.py +1 -1
- data/vendor/pygments-main/pygments/styles/default.py +1 -1
- data/vendor/pygments-main/pygments/styles/emacs.py +1 -1
- data/vendor/pygments-main/pygments/styles/friendly.py +1 -1
- data/vendor/pygments-main/pygments/styles/fruity.py +1 -1
- data/vendor/pygments-main/pygments/styles/igor.py +1 -1
- data/vendor/pygments-main/pygments/styles/lovelace.py +97 -0
- data/vendor/pygments-main/pygments/styles/manni.py +1 -1
- data/vendor/pygments-main/pygments/styles/monokai.py +1 -1
- data/vendor/pygments-main/pygments/styles/murphy.py +1 -1
- data/vendor/pygments-main/pygments/styles/native.py +1 -1
- data/vendor/pygments-main/pygments/styles/paraiso_dark.py +1 -1
- data/vendor/pygments-main/pygments/styles/paraiso_light.py +1 -1
- data/vendor/pygments-main/pygments/styles/pastie.py +1 -1
- data/vendor/pygments-main/pygments/styles/perldoc.py +2 -2
- data/vendor/pygments-main/pygments/styles/rrt.py +1 -1
- data/vendor/pygments-main/pygments/styles/sas.py +41 -0
- data/vendor/pygments-main/pygments/styles/stata.py +37 -0
- data/vendor/pygments-main/pygments/styles/tango.py +1 -1
- data/vendor/pygments-main/pygments/styles/trac.py +1 -1
- data/vendor/pygments-main/pygments/styles/vim.py +1 -1
- data/vendor/pygments-main/pygments/styles/vs.py +1 -1
- data/vendor/pygments-main/pygments/styles/xcode.py +1 -1
- data/vendor/pygments-main/pygments/token.py +31 -16
- data/vendor/pygments-main/pygments/unistring.py +141 -65
- data/vendor/pygments-main/pygments/util.py +129 -33
- data/vendor/pygments-main/requirements.txt +5 -0
- data/vendor/pygments-main/scripts/check_sources.py +25 -40
- data/vendor/pygments-main/scripts/debug_lexer.py +246 -0
- data/vendor/pygments-main/scripts/find_error.py +1 -0
- data/vendor/pygments-main/scripts/get_vimkw.py +35 -4
- data/vendor/pygments-main/setup.cfg +3 -0
- data/vendor/pygments-main/setup.py +17 -30
- data/vendor/pygments-main/tox.ini +7 -0
- metadata +159 -387
- data/vendor/pygments-main/ez_setup.py +0 -382
- data/vendor/pygments-main/pygments/lexers/_cocoabuiltins.py +0 -73
- data/vendor/pygments-main/pygments/lexers/_openedgebuiltins.py +0 -562
- data/vendor/pygments-main/pygments/lexers/_sourcemodbuiltins.py +0 -1077
- data/vendor/pygments-main/pygments/lexers/_vimbuiltins.py +0 -13
- data/vendor/pygments-main/pygments/lexers/qbasic.py +0 -157
- data/vendor/pygments-main/scripts/find_codetags.py +0 -213
- data/vendor/pygments-main/scripts/find_error.py +0 -173
- data/vendor/pygments-main/tests/dtds/HTML4-f.dtd +0 -37
- data/vendor/pygments-main/tests/dtds/HTML4-s.dtd +0 -869
- data/vendor/pygments-main/tests/dtds/HTML4.dcl +0 -88
- data/vendor/pygments-main/tests/dtds/HTML4.dtd +0 -1092
- data/vendor/pygments-main/tests/dtds/HTML4.soc +0 -9
- data/vendor/pygments-main/tests/dtds/HTMLlat1.ent +0 -195
- data/vendor/pygments-main/tests/dtds/HTMLspec.ent +0 -77
- data/vendor/pygments-main/tests/dtds/HTMLsym.ent +0 -241
- data/vendor/pygments-main/tests/examplefiles/99_bottles_of_beer.chpl +0 -118
- data/vendor/pygments-main/tests/examplefiles/AcidStateAdvanced.hs +0 -209
- data/vendor/pygments-main/tests/examplefiles/AlternatingGroup.mu +0 -102
- data/vendor/pygments-main/tests/examplefiles/BOM.js +0 -1
- data/vendor/pygments-main/tests/examplefiles/CPDictionary.j +0 -611
- data/vendor/pygments-main/tests/examplefiles/Config.in.cache +0 -1973
- data/vendor/pygments-main/tests/examplefiles/Constants.mo +0 -158
- data/vendor/pygments-main/tests/examplefiles/DancingSudoku.lhs +0 -411
- data/vendor/pygments-main/tests/examplefiles/Deflate.fs +0 -578
- data/vendor/pygments-main/tests/examplefiles/Error.pmod +0 -38
- data/vendor/pygments-main/tests/examplefiles/Errors.scala +0 -18
- data/vendor/pygments-main/tests/examplefiles/FakeFile.pike +0 -360
- data/vendor/pygments-main/tests/examplefiles/Get-CommandDefinitionHtml.ps1 +0 -66
- data/vendor/pygments-main/tests/examplefiles/IPDispatchC.nc +0 -104
- data/vendor/pygments-main/tests/examplefiles/IPDispatchP.nc +0 -671
- data/vendor/pygments-main/tests/examplefiles/Intro.java +0 -1660
- data/vendor/pygments-main/tests/examplefiles/Makefile +0 -1131
- data/vendor/pygments-main/tests/examplefiles/Object.st +0 -4394
- data/vendor/pygments-main/tests/examplefiles/OrderedMap.hx +0 -584
- data/vendor/pygments-main/tests/examplefiles/RoleQ.pm6 +0 -23
- data/vendor/pygments-main/tests/examplefiles/SmallCheck.hs +0 -378
- data/vendor/pygments-main/tests/examplefiles/Sorting.mod +0 -470
- data/vendor/pygments-main/tests/examplefiles/Sudoku.lhs +0 -382
- data/vendor/pygments-main/tests/examplefiles/addressbook.proto +0 -30
- data/vendor/pygments-main/tests/examplefiles/antlr_ANTLRv3.g +0 -608
- data/vendor/pygments-main/tests/examplefiles/antlr_throws +0 -1
- data/vendor/pygments-main/tests/examplefiles/apache2.conf +0 -393
- data/vendor/pygments-main/tests/examplefiles/as3_test.as +0 -143
- data/vendor/pygments-main/tests/examplefiles/as3_test2.as +0 -46
- data/vendor/pygments-main/tests/examplefiles/as3_test3.as +0 -3
- data/vendor/pygments-main/tests/examplefiles/aspx-cs_example +0 -27
- data/vendor/pygments-main/tests/examplefiles/autoit_submit.au3 +0 -25
- data/vendor/pygments-main/tests/examplefiles/badcase.java +0 -2
- data/vendor/pygments-main/tests/examplefiles/batchfile.bat +0 -49
- data/vendor/pygments-main/tests/examplefiles/bigtest.nsi +0 -308
- data/vendor/pygments-main/tests/examplefiles/boot-9.scm +0 -1557
- data/vendor/pygments-main/tests/examplefiles/ca65_example +0 -284
- data/vendor/pygments-main/tests/examplefiles/cbmbas_example +0 -9
- data/vendor/pygments-main/tests/examplefiles/cells.ps +0 -515
- data/vendor/pygments-main/tests/examplefiles/ceval.c +0 -2604
- data/vendor/pygments-main/tests/examplefiles/cheetah_example.html +0 -13
- data/vendor/pygments-main/tests/examplefiles/classes.dylan +0 -125
- data/vendor/pygments-main/tests/examplefiles/clojure-weird-keywords.clj +0 -5
- data/vendor/pygments-main/tests/examplefiles/condensed_ruby.rb +0 -10
- data/vendor/pygments-main/tests/examplefiles/coq_RelationClasses +0 -447
- data/vendor/pygments-main/tests/examplefiles/core.cljs +0 -52
- data/vendor/pygments-main/tests/examplefiles/database.pytb +0 -20
- data/vendor/pygments-main/tests/examplefiles/de.MoinMoin.po +0 -2461
- data/vendor/pygments-main/tests/examplefiles/demo.ahk +0 -181
- data/vendor/pygments-main/tests/examplefiles/demo.cfm +0 -50
- data/vendor/pygments-main/tests/examplefiles/demo.hbs +0 -12
- data/vendor/pygments-main/tests/examplefiles/django_sample.html+django +0 -68
- data/vendor/pygments-main/tests/examplefiles/dwarf.cw +0 -17
- data/vendor/pygments-main/tests/examplefiles/ember.handlebars +0 -33
- data/vendor/pygments-main/tests/examplefiles/erl_session +0 -10
- data/vendor/pygments-main/tests/examplefiles/escape_semicolon.clj +0 -1
- data/vendor/pygments-main/tests/examplefiles/evil_regex.js +0 -48
- data/vendor/pygments-main/tests/examplefiles/example.Rd +0 -78
- data/vendor/pygments-main/tests/examplefiles/example.als +0 -217
- data/vendor/pygments-main/tests/examplefiles/example.bug +0 -54
- data/vendor/pygments-main/tests/examplefiles/example.c +0 -2080
- data/vendor/pygments-main/tests/examplefiles/example.ceylon +0 -52
- data/vendor/pygments-main/tests/examplefiles/example.chai +0 -6
- data/vendor/pygments-main/tests/examplefiles/example.clay +0 -33
- data/vendor/pygments-main/tests/examplefiles/example.cls +0 -15
- data/vendor/pygments-main/tests/examplefiles/example.cob +0 -3556
- data/vendor/pygments-main/tests/examplefiles/example.coffee +0 -27
- data/vendor/pygments-main/tests/examplefiles/example.cpp +0 -2363
- data/vendor/pygments-main/tests/examplefiles/example.e +0 -124
- data/vendor/pygments-main/tests/examplefiles/example.f90 +0 -8
- data/vendor/pygments-main/tests/examplefiles/example.feature +0 -16
- data/vendor/pygments-main/tests/examplefiles/example.gd +0 -23
- data/vendor/pygments-main/tests/examplefiles/example.gi +0 -64
- data/vendor/pygments-main/tests/examplefiles/example.groovy +0 -2
- data/vendor/pygments-main/tests/examplefiles/example.gs +0 -106
- data/vendor/pygments-main/tests/examplefiles/example.gst +0 -7
- data/vendor/pygments-main/tests/examplefiles/example.hs +0 -27
- data/vendor/pygments-main/tests/examplefiles/example.hx +0 -185
- data/vendor/pygments-main/tests/examplefiles/example.i6t +0 -32
- data/vendor/pygments-main/tests/examplefiles/example.i7x +0 -45
- data/vendor/pygments-main/tests/examplefiles/example.inf +0 -374
- data/vendor/pygments-main/tests/examplefiles/example.j +0 -564
- data/vendor/pygments-main/tests/examplefiles/example.jag +0 -48
- data/vendor/pygments-main/tests/examplefiles/example.java +0 -16
- data/vendor/pygments-main/tests/examplefiles/example.kal +0 -75
- data/vendor/pygments-main/tests/examplefiles/example.kt +0 -47
- data/vendor/pygments-main/tests/examplefiles/example.lagda +0 -19
- data/vendor/pygments-main/tests/examplefiles/example.liquid +0 -42
- data/vendor/pygments-main/tests/examplefiles/example.lua +0 -250
- data/vendor/pygments-main/tests/examplefiles/example.ma +0 -8
- data/vendor/pygments-main/tests/examplefiles/example.monkey +0 -152
- data/vendor/pygments-main/tests/examplefiles/example.moo +0 -26
- data/vendor/pygments-main/tests/examplefiles/example.moon +0 -629
- data/vendor/pygments-main/tests/examplefiles/example.mq4 +0 -187
- data/vendor/pygments-main/tests/examplefiles/example.mqh +0 -123
- data/vendor/pygments-main/tests/examplefiles/example.msc +0 -43
- data/vendor/pygments-main/tests/examplefiles/example.ni +0 -57
- data/vendor/pygments-main/tests/examplefiles/example.nim +0 -1010
- data/vendor/pygments-main/tests/examplefiles/example.nix +0 -80
- data/vendor/pygments-main/tests/examplefiles/example.ns2 +0 -69
- data/vendor/pygments-main/tests/examplefiles/example.pas +0 -2708
- data/vendor/pygments-main/tests/examplefiles/example.pp +0 -8
- data/vendor/pygments-main/tests/examplefiles/example.prg +0 -161
- data/vendor/pygments-main/tests/examplefiles/example.rb +0 -1852
- data/vendor/pygments-main/tests/examplefiles/example.red +0 -257
- data/vendor/pygments-main/tests/examplefiles/example.reds +0 -150
- data/vendor/pygments-main/tests/examplefiles/example.reg +0 -19
- data/vendor/pygments-main/tests/examplefiles/example.rexx +0 -50
- data/vendor/pygments-main/tests/examplefiles/example.rhtml +0 -561
- data/vendor/pygments-main/tests/examplefiles/example.rkt +0 -743
- data/vendor/pygments-main/tests/examplefiles/example.rpf +0 -4
- data/vendor/pygments-main/tests/examplefiles/example.sh +0 -22
- data/vendor/pygments-main/tests/examplefiles/example.sh-session +0 -19
- data/vendor/pygments-main/tests/examplefiles/example.shell-session +0 -45
- data/vendor/pygments-main/tests/examplefiles/example.slim +0 -31
- data/vendor/pygments-main/tests/examplefiles/example.sls +0 -51
- data/vendor/pygments-main/tests/examplefiles/example.sml +0 -156
- data/vendor/pygments-main/tests/examplefiles/example.snobol +0 -15
- data/vendor/pygments-main/tests/examplefiles/example.stan +0 -110
- data/vendor/pygments-main/tests/examplefiles/example.tea +0 -34
- data/vendor/pygments-main/tests/examplefiles/example.todotxt +0 -9
- data/vendor/pygments-main/tests/examplefiles/example.ts +0 -28
- data/vendor/pygments-main/tests/examplefiles/example.u +0 -548
- data/vendor/pygments-main/tests/examplefiles/example.weechatlog +0 -9
- data/vendor/pygments-main/tests/examplefiles/example.xhtml +0 -376
- data/vendor/pygments-main/tests/examplefiles/example.xtend +0 -34
- data/vendor/pygments-main/tests/examplefiles/example.yaml +0 -302
- data/vendor/pygments-main/tests/examplefiles/example2.aspx +0 -29
- data/vendor/pygments-main/tests/examplefiles/example2.msc +0 -79
- data/vendor/pygments-main/tests/examplefiles/exampleScript.cfc +0 -241
- data/vendor/pygments-main/tests/examplefiles/exampleTag.cfc +0 -18
- data/vendor/pygments-main/tests/examplefiles/example_elixir.ex +0 -205
- data/vendor/pygments-main/tests/examplefiles/example_file.fy +0 -128
- data/vendor/pygments-main/tests/examplefiles/firefox.mak +0 -586
- data/vendor/pygments-main/tests/examplefiles/flipflop.sv +0 -19
- data/vendor/pygments-main/tests/examplefiles/foo.sce +0 -6
- data/vendor/pygments-main/tests/examplefiles/format.ml +0 -1213
- data/vendor/pygments-main/tests/examplefiles/fucked_up.rb +0 -77
- data/vendor/pygments-main/tests/examplefiles/function.mu +0 -1
- data/vendor/pygments-main/tests/examplefiles/functional.rst +0 -1472
- data/vendor/pygments-main/tests/examplefiles/garcia-wachs.kk +0 -133
- data/vendor/pygments-main/tests/examplefiles/genclass.clj +0 -510
- data/vendor/pygments-main/tests/examplefiles/genshi_example.xml+genshi +0 -193
- data/vendor/pygments-main/tests/examplefiles/genshitext_example.genshitext +0 -33
- data/vendor/pygments-main/tests/examplefiles/glsl.frag +0 -7
- data/vendor/pygments-main/tests/examplefiles/glsl.vert +0 -13
- data/vendor/pygments-main/tests/examplefiles/grammar-test.p6 +0 -22
- data/vendor/pygments-main/tests/examplefiles/hash_syntax.rb +0 -5
- data/vendor/pygments-main/tests/examplefiles/hello.at +0 -6
- data/vendor/pygments-main/tests/examplefiles/hello.golo +0 -5
- data/vendor/pygments-main/tests/examplefiles/hello.lsl +0 -12
- data/vendor/pygments-main/tests/examplefiles/hello.smali +0 -40
- data/vendor/pygments-main/tests/examplefiles/hello.sp +0 -9
- data/vendor/pygments-main/tests/examplefiles/html+php_faulty.php +0 -1
- data/vendor/pygments-main/tests/examplefiles/http_request_example +0 -15
- data/vendor/pygments-main/tests/examplefiles/http_response_example +0 -29
- data/vendor/pygments-main/tests/examplefiles/hybris_File.hy +0 -174
- data/vendor/pygments-main/tests/examplefiles/idl_sample.pro +0 -73
- data/vendor/pygments-main/tests/examplefiles/iex_example +0 -23
- data/vendor/pygments-main/tests/examplefiles/inet_pton6.dg +0 -71
- data/vendor/pygments-main/tests/examplefiles/intro.ik +0 -24
- data/vendor/pygments-main/tests/examplefiles/ints.php +0 -10
- data/vendor/pygments-main/tests/examplefiles/intsyn.fun +0 -675
- data/vendor/pygments-main/tests/examplefiles/intsyn.sig +0 -286
- data/vendor/pygments-main/tests/examplefiles/irb_heredoc +0 -8
- data/vendor/pygments-main/tests/examplefiles/irc.lsp +0 -214
- data/vendor/pygments-main/tests/examplefiles/java.properties +0 -16
- data/vendor/pygments-main/tests/examplefiles/jbst_example1.jbst +0 -28
- data/vendor/pygments-main/tests/examplefiles/jbst_example2.jbst +0 -45
- data/vendor/pygments-main/tests/examplefiles/jinjadesignerdoc.rst +0 -713
- data/vendor/pygments-main/tests/examplefiles/json.lasso +0 -301
- data/vendor/pygments-main/tests/examplefiles/json.lasso9 +0 -213
- data/vendor/pygments-main/tests/examplefiles/language.hy +0 -165
- data/vendor/pygments-main/tests/examplefiles/lighttpd_config.conf +0 -13
- data/vendor/pygments-main/tests/examplefiles/limbo.b +0 -456
- data/vendor/pygments-main/tests/examplefiles/linecontinuation.py +0 -47
- data/vendor/pygments-main/tests/examplefiles/livescript-demo.ls +0 -43
- data/vendor/pygments-main/tests/examplefiles/logos_example.xm +0 -28
- data/vendor/pygments-main/tests/examplefiles/ltmain.sh +0 -2849
- data/vendor/pygments-main/tests/examplefiles/main.cmake +0 -44
- data/vendor/pygments-main/tests/examplefiles/markdown.lsp +0 -679
- data/vendor/pygments-main/tests/examplefiles/matlab_noreturn +0 -3
- data/vendor/pygments-main/tests/examplefiles/matlab_sample +0 -30
- data/vendor/pygments-main/tests/examplefiles/matlabsession_sample.txt +0 -37
- data/vendor/pygments-main/tests/examplefiles/metagrammar.treetop +0 -455
- data/vendor/pygments-main/tests/examplefiles/minehunt.qml +0 -112
- data/vendor/pygments-main/tests/examplefiles/minimal.ns2 +0 -4
- data/vendor/pygments-main/tests/examplefiles/moin_SyntaxReference.txt +0 -340
- data/vendor/pygments-main/tests/examplefiles/multiline_regexes.rb +0 -38
- data/vendor/pygments-main/tests/examplefiles/nanomsg.intr +0 -95
- data/vendor/pygments-main/tests/examplefiles/nasm_aoutso.asm +0 -96
- data/vendor/pygments-main/tests/examplefiles/nasm_objexe.asm +0 -30
- data/vendor/pygments-main/tests/examplefiles/nemerle_sample.n +0 -87
- data/vendor/pygments-main/tests/examplefiles/nginx_nginx.conf +0 -118
- data/vendor/pygments-main/tests/examplefiles/numbers.c +0 -12
- data/vendor/pygments-main/tests/examplefiles/objc_example.m +0 -179
- data/vendor/pygments-main/tests/examplefiles/openedge_example +0 -34
- data/vendor/pygments-main/tests/examplefiles/pawn_example +0 -25
- data/vendor/pygments-main/tests/examplefiles/perl_misc +0 -62
- data/vendor/pygments-main/tests/examplefiles/perl_perl5db +0 -998
- data/vendor/pygments-main/tests/examplefiles/perl_regex-delims +0 -120
- data/vendor/pygments-main/tests/examplefiles/perlfunc.1 +0 -856
- data/vendor/pygments-main/tests/examplefiles/phpMyAdmin.spec +0 -163
- data/vendor/pygments-main/tests/examplefiles/phpcomplete.vim +0 -567
- data/vendor/pygments-main/tests/examplefiles/pleac.in.rb +0 -1223
- data/vendor/pygments-main/tests/examplefiles/postgresql_test.txt +0 -47
- data/vendor/pygments-main/tests/examplefiles/pppoe.applescript +0 -10
- data/vendor/pygments-main/tests/examplefiles/psql_session.txt +0 -122
- data/vendor/pygments-main/tests/examplefiles/py3_test.txt +0 -2
- data/vendor/pygments-main/tests/examplefiles/py3tb_test.py3tb +0 -4
- data/vendor/pygments-main/tests/examplefiles/pycon_test.pycon +0 -14
- data/vendor/pygments-main/tests/examplefiles/pytb_test2.pytb +0 -2
- data/vendor/pygments-main/tests/examplefiles/pytb_test3.pytb +0 -4
- data/vendor/pygments-main/tests/examplefiles/python25-bsd.mak +0 -234
- data/vendor/pygments-main/tests/examplefiles/qbasic_example +0 -2
- data/vendor/pygments-main/tests/examplefiles/qsort.prolog +0 -13
- data/vendor/pygments-main/tests/examplefiles/r-console-transcript.Rout +0 -38
- data/vendor/pygments-main/tests/examplefiles/r6rs-comments.scm +0 -23
- data/vendor/pygments-main/tests/examplefiles/ragel-cpp_rlscan +0 -280
- data/vendor/pygments-main/tests/examplefiles/ragel-cpp_snippet +0 -2
- data/vendor/pygments-main/tests/examplefiles/regex.js +0 -22
- data/vendor/pygments-main/tests/examplefiles/reversi.lsp +0 -427
- data/vendor/pygments-main/tests/examplefiles/robotframework_test.txt +0 -39
- data/vendor/pygments-main/tests/examplefiles/rql-queries.rql +0 -34
- data/vendor/pygments-main/tests/examplefiles/ruby_func_def.rb +0 -11
- data/vendor/pygments-main/tests/examplefiles/rust_example.rs +0 -233
- data/vendor/pygments-main/tests/examplefiles/scilab.sci +0 -30
- data/vendor/pygments-main/tests/examplefiles/scope.cirru +0 -43
- data/vendor/pygments-main/tests/examplefiles/session.dylan-console +0 -9
- data/vendor/pygments-main/tests/examplefiles/sibling.prolog +0 -19
- data/vendor/pygments-main/tests/examplefiles/simple.md +0 -747
- data/vendor/pygments-main/tests/examplefiles/smarty_example.html +0 -209
- data/vendor/pygments-main/tests/examplefiles/source.lgt +0 -343
- data/vendor/pygments-main/tests/examplefiles/sources.list +0 -62
- data/vendor/pygments-main/tests/examplefiles/sparql.rq +0 -23
- data/vendor/pygments-main/tests/examplefiles/sphere.pov +0 -18
- data/vendor/pygments-main/tests/examplefiles/sqlite3.sqlite3-console +0 -27
- data/vendor/pygments-main/tests/examplefiles/squid.conf +0 -30
- data/vendor/pygments-main/tests/examplefiles/string.jl +0 -1031
- data/vendor/pygments-main/tests/examplefiles/string_delimiters.d +0 -21
- data/vendor/pygments-main/tests/examplefiles/stripheredoc.sh +0 -3
- data/vendor/pygments-main/tests/examplefiles/swig_java.swg +0 -1329
- data/vendor/pygments-main/tests/examplefiles/swig_std_vector.i +0 -225
- data/vendor/pygments-main/tests/examplefiles/test.R +0 -185
- data/vendor/pygments-main/tests/examplefiles/test.adb +0 -211
- data/vendor/pygments-main/tests/examplefiles/test.agda +0 -102
- data/vendor/pygments-main/tests/examplefiles/test.apl +0 -26
- data/vendor/pygments-main/tests/examplefiles/test.asy +0 -131
- data/vendor/pygments-main/tests/examplefiles/test.awk +0 -121
- data/vendor/pygments-main/tests/examplefiles/test.bb +0 -95
- data/vendor/pygments-main/tests/examplefiles/test.bmx +0 -145
- data/vendor/pygments-main/tests/examplefiles/test.boo +0 -39
- data/vendor/pygments-main/tests/examplefiles/test.bro +0 -250
- data/vendor/pygments-main/tests/examplefiles/test.cs +0 -374
- data/vendor/pygments-main/tests/examplefiles/test.css +0 -54
- data/vendor/pygments-main/tests/examplefiles/test.cu +0 -36
- data/vendor/pygments-main/tests/examplefiles/test.cyp +0 -123
- data/vendor/pygments-main/tests/examplefiles/test.d +0 -135
- data/vendor/pygments-main/tests/examplefiles/test.dart +0 -23
- data/vendor/pygments-main/tests/examplefiles/test.dtd +0 -89
- data/vendor/pygments-main/tests/examplefiles/test.ebnf +0 -31
- data/vendor/pygments-main/tests/examplefiles/test.ec +0 -605
- data/vendor/pygments-main/tests/examplefiles/test.ecl +0 -58
- data/vendor/pygments-main/tests/examplefiles/test.eh +0 -315
- data/vendor/pygments-main/tests/examplefiles/test.erl +0 -169
- data/vendor/pygments-main/tests/examplefiles/test.evoque +0 -33
- data/vendor/pygments-main/tests/examplefiles/test.fan +0 -818
- data/vendor/pygments-main/tests/examplefiles/test.flx +0 -57
- data/vendor/pygments-main/tests/examplefiles/test.gdc +0 -13
- data/vendor/pygments-main/tests/examplefiles/test.groovy +0 -97
- data/vendor/pygments-main/tests/examplefiles/test.html +0 -339
- data/vendor/pygments-main/tests/examplefiles/test.idr +0 -93
- data/vendor/pygments-main/tests/examplefiles/test.ini +0 -10
- data/vendor/pygments-main/tests/examplefiles/test.java +0 -653
- data/vendor/pygments-main/tests/examplefiles/test.jsp +0 -24
- data/vendor/pygments-main/tests/examplefiles/test.maql +0 -45
- data/vendor/pygments-main/tests/examplefiles/test.mask +0 -41
- data/vendor/pygments-main/tests/examplefiles/test.mod +0 -374
- data/vendor/pygments-main/tests/examplefiles/test.moo +0 -51
- data/vendor/pygments-main/tests/examplefiles/test.myt +0 -166
- data/vendor/pygments-main/tests/examplefiles/test.nim +0 -93
- data/vendor/pygments-main/tests/examplefiles/test.opa +0 -10
- data/vendor/pygments-main/tests/examplefiles/test.p6 +0 -252
- data/vendor/pygments-main/tests/examplefiles/test.pan +0 -54
- data/vendor/pygments-main/tests/examplefiles/test.pas +0 -743
- data/vendor/pygments-main/tests/examplefiles/test.php +0 -507
- data/vendor/pygments-main/tests/examplefiles/test.pig +0 -148
- data/vendor/pygments-main/tests/examplefiles/test.plot +0 -333
- data/vendor/pygments-main/tests/examplefiles/test.ps1 +0 -108
- data/vendor/pygments-main/tests/examplefiles/test.pwn +0 -253
- data/vendor/pygments-main/tests/examplefiles/test.pypylog +0 -1839
- data/vendor/pygments-main/tests/examplefiles/test.r3 +0 -114
- data/vendor/pygments-main/tests/examplefiles/test.rb +0 -177
- data/vendor/pygments-main/tests/examplefiles/test.rhtml +0 -43
- data/vendor/pygments-main/tests/examplefiles/test.rsl +0 -111
- data/vendor/pygments-main/tests/examplefiles/test.scaml +0 -8
- data/vendor/pygments-main/tests/examplefiles/test.ssp +0 -12
- data/vendor/pygments-main/tests/examplefiles/test.swift +0 -65
- data/vendor/pygments-main/tests/examplefiles/test.tcsh +0 -830
- data/vendor/pygments-main/tests/examplefiles/test.vb +0 -407
- data/vendor/pygments-main/tests/examplefiles/test.vhdl +0 -161
- data/vendor/pygments-main/tests/examplefiles/test.xqy +0 -138
- data/vendor/pygments-main/tests/examplefiles/test.xsl +0 -23
- data/vendor/pygments-main/tests/examplefiles/test.zep +0 -33
- data/vendor/pygments-main/tests/examplefiles/test2.pypylog +0 -120
- data/vendor/pygments-main/tests/examplefiles/truncated.pytb +0 -15
- data/vendor/pygments-main/tests/examplefiles/type.lisp +0 -1218
- data/vendor/pygments-main/tests/examplefiles/underscore.coffee +0 -603
- data/vendor/pygments-main/tests/examplefiles/unicode.applescript +0 -5
- data/vendor/pygments-main/tests/examplefiles/unicode.go +0 -10
- data/vendor/pygments-main/tests/examplefiles/unicodedoc.py +0 -11
- data/vendor/pygments-main/tests/examplefiles/unix-io.lid +0 -37
- data/vendor/pygments-main/tests/examplefiles/vbnet_test.bas +0 -29
- data/vendor/pygments-main/tests/examplefiles/vctreestatus_hg +0 -4
- data/vendor/pygments-main/tests/examplefiles/vimrc +0 -21
- data/vendor/pygments-main/tests/examplefiles/webkit-transition.css +0 -3
- data/vendor/pygments-main/tests/examplefiles/while.pov +0 -13
- data/vendor/pygments-main/tests/examplefiles/wiki.factor +0 -384
- data/vendor/pygments-main/tests/examplefiles/xml_example +0 -1897
- data/vendor/pygments-main/tests/examplefiles/zmlrpc.f90 +0 -798
- data/vendor/pygments-main/tests/run.py +0 -44
- data/vendor/pygments-main/tests/string_asserts.py +0 -22
- data/vendor/pygments-main/tests/support.py +0 -15
- data/vendor/pygments-main/tests/support/tags +0 -36
- data/vendor/pygments-main/tests/test_basic_api.py +0 -309
- data/vendor/pygments-main/tests/test_cfm.py +0 -46
- data/vendor/pygments-main/tests/test_clexer.py +0 -236
- data/vendor/pygments-main/tests/test_cmdline.py +0 -106
- data/vendor/pygments-main/tests/test_examplefiles.py +0 -110
- data/vendor/pygments-main/tests/test_html_formatter.py +0 -180
- data/vendor/pygments-main/tests/test_latex_formatter.py +0 -57
- data/vendor/pygments-main/tests/test_lexers_other.py +0 -68
- data/vendor/pygments-main/tests/test_objectiveclexer.py +0 -81
- data/vendor/pygments-main/tests/test_perllexer.py +0 -137
- data/vendor/pygments-main/tests/test_qbasiclexer.py +0 -43
- data/vendor/pygments-main/tests/test_regexlexer.py +0 -54
- data/vendor/pygments-main/tests/test_rtf_formatter.py +0 -109
- data/vendor/pygments-main/tests/test_string_asserts.py +0 -39
- data/vendor/pygments-main/tests/test_token.py +0 -46
- data/vendor/pygments-main/tests/test_using_api.py +0 -40
- data/vendor/pygments-main/tests/test_util.py +0 -135
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""
|
|
3
|
+
pygments.lexers.testing
|
|
4
|
+
~~~~~~~~~~~~~~~~~~~~~~~
|
|
5
|
+
|
|
6
|
+
Lexers for testing languages.
|
|
7
|
+
|
|
8
|
+
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
|
9
|
+
:license: BSD, see LICENSE for details.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from pygments.lexer import RegexLexer, include, bygroups
|
|
13
|
+
from pygments.token import Comment, Keyword, Name, String, Number, Generic, Text
|
|
14
|
+
|
|
15
|
+
__all__ = ['GherkinLexer', 'TAPLexer']
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class GherkinLexer(RegexLexer):
|
|
19
|
+
"""
|
|
20
|
+
For `Gherkin <http://github.com/aslakhellesoy/gherkin/>` syntax.
|
|
21
|
+
|
|
22
|
+
.. versionadded:: 1.2
|
|
23
|
+
"""
|
|
24
|
+
name = 'Gherkin'
|
|
25
|
+
aliases = ['cucumber', 'gherkin']
|
|
26
|
+
filenames = ['*.feature']
|
|
27
|
+
mimetypes = ['text/x-gherkin']
|
|
28
|
+
|
|
29
|
+
feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
|
|
30
|
+
feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
|
|
31
|
+
examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
|
|
32
|
+
step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
|
|
33
|
+
|
|
34
|
+
tokens = {
|
|
35
|
+
'comments': [
|
|
36
|
+
(r'^\s*#.*$', Comment),
|
|
37
|
+
],
|
|
38
|
+
'feature_elements': [
|
|
39
|
+
(step_keywords, Keyword, "step_content_stack"),
|
|
40
|
+
include('comments'),
|
|
41
|
+
(r"(\s|.)", Name.Function),
|
|
42
|
+
],
|
|
43
|
+
'feature_elements_on_stack': [
|
|
44
|
+
(step_keywords, Keyword, "#pop:2"),
|
|
45
|
+
include('comments'),
|
|
46
|
+
(r"(\s|.)", Name.Function),
|
|
47
|
+
],
|
|
48
|
+
'examples_table': [
|
|
49
|
+
(r"\s+\|", Keyword, 'examples_table_header'),
|
|
50
|
+
include('comments'),
|
|
51
|
+
(r"(\s|.)", Name.Function),
|
|
52
|
+
],
|
|
53
|
+
'examples_table_header': [
|
|
54
|
+
(r"\s+\|\s*$", Keyword, "#pop:2"),
|
|
55
|
+
include('comments'),
|
|
56
|
+
(r"\\\|", Name.Variable),
|
|
57
|
+
(r"\s*\|", Keyword),
|
|
58
|
+
(r"[^|]", Name.Variable),
|
|
59
|
+
],
|
|
60
|
+
'scenario_sections_on_stack': [
|
|
61
|
+
(feature_element_keywords,
|
|
62
|
+
bygroups(Name.Function, Keyword, Keyword, Name.Function),
|
|
63
|
+
"feature_elements_on_stack"),
|
|
64
|
+
],
|
|
65
|
+
'narrative': [
|
|
66
|
+
include('scenario_sections_on_stack'),
|
|
67
|
+
include('comments'),
|
|
68
|
+
(r"(\s|.)", Name.Function),
|
|
69
|
+
],
|
|
70
|
+
'table_vars': [
|
|
71
|
+
(r'(<[^>]+>)', Name.Variable),
|
|
72
|
+
],
|
|
73
|
+
'numbers': [
|
|
74
|
+
(r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
|
|
75
|
+
],
|
|
76
|
+
'string': [
|
|
77
|
+
include('table_vars'),
|
|
78
|
+
(r'(\s|.)', String),
|
|
79
|
+
],
|
|
80
|
+
'py_string': [
|
|
81
|
+
(r'"""', Keyword, "#pop"),
|
|
82
|
+
include('string'),
|
|
83
|
+
],
|
|
84
|
+
'step_content_root': [
|
|
85
|
+
(r"$", Keyword, "#pop"),
|
|
86
|
+
include('step_content'),
|
|
87
|
+
],
|
|
88
|
+
'step_content_stack': [
|
|
89
|
+
(r"$", Keyword, "#pop:2"),
|
|
90
|
+
include('step_content'),
|
|
91
|
+
],
|
|
92
|
+
'step_content': [
|
|
93
|
+
(r'"', Name.Function, "double_string"),
|
|
94
|
+
include('table_vars'),
|
|
95
|
+
include('numbers'),
|
|
96
|
+
include('comments'),
|
|
97
|
+
(r'(\s|.)', Name.Function),
|
|
98
|
+
],
|
|
99
|
+
'table_content': [
|
|
100
|
+
(r"\s+\|\s*$", Keyword, "#pop"),
|
|
101
|
+
include('comments'),
|
|
102
|
+
(r"\\\|", String),
|
|
103
|
+
(r"\s*\|", Keyword),
|
|
104
|
+
include('string'),
|
|
105
|
+
],
|
|
106
|
+
'double_string': [
|
|
107
|
+
(r'"', Name.Function, "#pop"),
|
|
108
|
+
include('string'),
|
|
109
|
+
],
|
|
110
|
+
'root': [
|
|
111
|
+
(r'\n', Name.Function),
|
|
112
|
+
include('comments'),
|
|
113
|
+
(r'"""', Keyword, "py_string"),
|
|
114
|
+
(r'\s+\|', Keyword, 'table_content'),
|
|
115
|
+
(r'"', Name.Function, "double_string"),
|
|
116
|
+
include('table_vars'),
|
|
117
|
+
include('numbers'),
|
|
118
|
+
(r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
|
|
119
|
+
(step_keywords, bygroups(Name.Function, Keyword),
|
|
120
|
+
'step_content_root'),
|
|
121
|
+
(feature_keywords, bygroups(Keyword, Keyword, Name.Function),
|
|
122
|
+
'narrative'),
|
|
123
|
+
(feature_element_keywords,
|
|
124
|
+
bygroups(Name.Function, Keyword, Keyword, Name.Function),
|
|
125
|
+
'feature_elements'),
|
|
126
|
+
(examples_keywords,
|
|
127
|
+
bygroups(Name.Function, Keyword, Keyword, Name.Function),
|
|
128
|
+
'examples_table'),
|
|
129
|
+
(r'(\s|.)', Name.Function),
|
|
130
|
+
]
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
class TAPLexer(RegexLexer):
|
|
135
|
+
"""
|
|
136
|
+
For Test Anything Protocol (TAP) output.
|
|
137
|
+
|
|
138
|
+
.. versionadded:: 2.1
|
|
139
|
+
"""
|
|
140
|
+
name = 'TAP'
|
|
141
|
+
aliases = ['tap']
|
|
142
|
+
filenames = ['*.tap']
|
|
143
|
+
|
|
144
|
+
tokens = {
|
|
145
|
+
'root': [
|
|
146
|
+
# A TAP version may be specified.
|
|
147
|
+
(r'^TAP version \d+\n', Name.Namespace),
|
|
148
|
+
|
|
149
|
+
# Specify a plan with a plan line.
|
|
150
|
+
(r'^1\.\.\d+', Keyword.Declaration, 'plan'),
|
|
151
|
+
|
|
152
|
+
# A test failure
|
|
153
|
+
(r'^(not ok)([^\S\n]*)(\d*)',
|
|
154
|
+
bygroups(Generic.Error, Text, Number.Integer), 'test'),
|
|
155
|
+
|
|
156
|
+
# A test success
|
|
157
|
+
(r'^(ok)([^\S\n]*)(\d*)',
|
|
158
|
+
bygroups(Keyword.Reserved, Text, Number.Integer), 'test'),
|
|
159
|
+
|
|
160
|
+
# Diagnostics start with a hash.
|
|
161
|
+
(r'^#.*\n', Comment),
|
|
162
|
+
|
|
163
|
+
# TAP's version of an abort statement.
|
|
164
|
+
(r'^Bail out!.*\n', Generic.Error),
|
|
165
|
+
|
|
166
|
+
# TAP ignores any unrecognized lines.
|
|
167
|
+
(r'^.*\n', Text),
|
|
168
|
+
],
|
|
169
|
+
'plan': [
|
|
170
|
+
# Consume whitespace (but not newline).
|
|
171
|
+
(r'[^\S\n]+', Text),
|
|
172
|
+
|
|
173
|
+
# A plan may have a directive with it.
|
|
174
|
+
(r'#', Comment, 'directive'),
|
|
175
|
+
|
|
176
|
+
# Or it could just end.
|
|
177
|
+
(r'\n', Comment, '#pop'),
|
|
178
|
+
|
|
179
|
+
# Anything else is wrong.
|
|
180
|
+
(r'.*\n', Generic.Error, '#pop'),
|
|
181
|
+
],
|
|
182
|
+
'test': [
|
|
183
|
+
# Consume whitespace (but not newline).
|
|
184
|
+
(r'[^\S\n]+', Text),
|
|
185
|
+
|
|
186
|
+
# A test may have a directive with it.
|
|
187
|
+
(r'#', Comment, 'directive'),
|
|
188
|
+
|
|
189
|
+
(r'\S+', Text),
|
|
190
|
+
|
|
191
|
+
(r'\n', Text, '#pop'),
|
|
192
|
+
],
|
|
193
|
+
'directive': [
|
|
194
|
+
# Consume whitespace (but not newline).
|
|
195
|
+
(r'[^\S\n]+', Comment),
|
|
196
|
+
|
|
197
|
+
# Extract todo items.
|
|
198
|
+
(r'(?i)\bTODO\b', Comment.Preproc),
|
|
199
|
+
|
|
200
|
+
# Extract skip items.
|
|
201
|
+
(r'(?i)\bSKIP\S*', Comment.Preproc),
|
|
202
|
+
|
|
203
|
+
(r'\S+', Comment),
|
|
204
|
+
|
|
205
|
+
(r'\n', Comment, '#pop:2'),
|
|
206
|
+
],
|
|
207
|
+
}
|
|
@@ -5,2051 +5,21 @@
|
|
|
5
5
|
|
|
6
6
|
Lexers for non-source code file types.
|
|
7
7
|
|
|
8
|
-
:copyright: Copyright 2006-
|
|
8
|
+
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
|
9
9
|
:license: BSD, see LICENSE for details.
|
|
10
10
|
"""
|
|
11
11
|
|
|
12
|
-
import
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
from pygments.
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
from pygments.
|
|
20
|
-
from pygments.lexers.
|
|
21
|
-
from pygments.lexers.
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
|
|
27
|
-
'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
|
|
28
|
-
'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
|
|
29
|
-
'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer',
|
|
30
|
-
'TodotxtLexer', 'DockerLexer']
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
class IniLexer(RegexLexer):
|
|
34
|
-
"""
|
|
35
|
-
Lexer for configuration files in INI style.
|
|
36
|
-
"""
|
|
37
|
-
|
|
38
|
-
name = 'INI'
|
|
39
|
-
aliases = ['ini', 'cfg', 'dosini']
|
|
40
|
-
filenames = ['*.ini', '*.cfg']
|
|
41
|
-
mimetypes = ['text/x-ini']
|
|
42
|
-
|
|
43
|
-
tokens = {
|
|
44
|
-
'root': [
|
|
45
|
-
(r'\s+', Text),
|
|
46
|
-
(r'[;#].*', Comment.Single),
|
|
47
|
-
(r'\[.*?\]$', Keyword),
|
|
48
|
-
(r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
|
|
49
|
-
bygroups(Name.Attribute, Text, Operator, Text, String))
|
|
50
|
-
]
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
def analyse_text(text):
|
|
54
|
-
npos = text.find('\n')
|
|
55
|
-
if npos < 3:
|
|
56
|
-
return False
|
|
57
|
-
return text[0] == '[' and text[npos-1] == ']'
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
class RegeditLexer(RegexLexer):
|
|
61
|
-
"""
|
|
62
|
-
Lexer for `Windows Registry
|
|
63
|
-
<http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
|
|
64
|
-
by regedit.
|
|
65
|
-
|
|
66
|
-
.. versionadded:: 1.6
|
|
67
|
-
"""
|
|
68
|
-
|
|
69
|
-
name = 'reg'
|
|
70
|
-
aliases = ['registry']
|
|
71
|
-
filenames = ['*.reg']
|
|
72
|
-
mimetypes = ['text/x-windows-registry']
|
|
73
|
-
|
|
74
|
-
tokens = {
|
|
75
|
-
'root': [
|
|
76
|
-
(r'Windows Registry Editor.*', Text),
|
|
77
|
-
(r'\s+', Text),
|
|
78
|
-
(r'[;#].*', Comment.Single),
|
|
79
|
-
(r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
|
|
80
|
-
bygroups(Keyword, Operator, Name.Builtin, Keyword)),
|
|
81
|
-
# String keys, which obey somewhat normal escaping
|
|
82
|
-
(r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
|
|
83
|
-
bygroups(Name.Attribute, Text, Operator, Text),
|
|
84
|
-
'value'),
|
|
85
|
-
# Bare keys (includes @)
|
|
86
|
-
(r'(.*?)([ \t]*)(=)([ \t]*)',
|
|
87
|
-
bygroups(Name.Attribute, Text, Operator, Text),
|
|
88
|
-
'value'),
|
|
89
|
-
],
|
|
90
|
-
'value': [
|
|
91
|
-
(r'-', Operator, '#pop'), # delete value
|
|
92
|
-
(r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
|
|
93
|
-
bygroups(Name.Variable, Punctuation, Number), '#pop'),
|
|
94
|
-
# As far as I know, .reg files do not support line continuation.
|
|
95
|
-
(r'.*', String, '#pop'),
|
|
96
|
-
]
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
def analyse_text(text):
|
|
100
|
-
return text.startswith('Windows Registry Editor')
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
class PropertiesLexer(RegexLexer):
|
|
104
|
-
"""
|
|
105
|
-
Lexer for configuration files in Java's properties format.
|
|
106
|
-
|
|
107
|
-
.. versionadded:: 1.4
|
|
108
|
-
"""
|
|
109
|
-
|
|
110
|
-
name = 'Properties'
|
|
111
|
-
aliases = ['properties', 'jproperties']
|
|
112
|
-
filenames = ['*.properties']
|
|
113
|
-
mimetypes = ['text/x-java-properties']
|
|
114
|
-
|
|
115
|
-
tokens = {
|
|
116
|
-
'root': [
|
|
117
|
-
(r'\s+', Text),
|
|
118
|
-
(r'(?:[;#]|//).*$', Comment),
|
|
119
|
-
(r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
|
|
120
|
-
bygroups(Name.Attribute, Text, Operator, Text, String)),
|
|
121
|
-
],
|
|
122
|
-
}
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
class SourcesListLexer(RegexLexer):
|
|
126
|
-
"""
|
|
127
|
-
Lexer that highlights debian sources.list files.
|
|
128
|
-
|
|
129
|
-
.. versionadded:: 0.7
|
|
130
|
-
"""
|
|
131
|
-
|
|
132
|
-
name = 'Debian Sourcelist'
|
|
133
|
-
aliases = ['sourceslist', 'sources.list', 'debsources']
|
|
134
|
-
filenames = ['sources.list']
|
|
135
|
-
mimetype = ['application/x-debian-sourceslist']
|
|
136
|
-
|
|
137
|
-
tokens = {
|
|
138
|
-
'root': [
|
|
139
|
-
(r'\s+', Text),
|
|
140
|
-
(r'#.*?$', Comment),
|
|
141
|
-
(r'^(deb(?:-src)?)(\s+)',
|
|
142
|
-
bygroups(Keyword, Text), 'distribution')
|
|
143
|
-
],
|
|
144
|
-
'distribution': [
|
|
145
|
-
(r'#.*?$', Comment, '#pop'),
|
|
146
|
-
(r'\$\(ARCH\)', Name.Variable),
|
|
147
|
-
(r'[^\s$[]+', String),
|
|
148
|
-
(r'\[', String.Other, 'escaped-distribution'),
|
|
149
|
-
(r'\$', String),
|
|
150
|
-
(r'\s+', Text, 'components')
|
|
151
|
-
],
|
|
152
|
-
'escaped-distribution': [
|
|
153
|
-
(r'\]', String.Other, '#pop'),
|
|
154
|
-
(r'\$\(ARCH\)', Name.Variable),
|
|
155
|
-
(r'[^\]$]+', String.Other),
|
|
156
|
-
(r'\$', String.Other)
|
|
157
|
-
],
|
|
158
|
-
'components': [
|
|
159
|
-
(r'#.*?$', Comment, '#pop:2'),
|
|
160
|
-
(r'$', Text, '#pop:2'),
|
|
161
|
-
(r'\s+', Text),
|
|
162
|
-
(r'\S+', Keyword.Pseudo),
|
|
163
|
-
]
|
|
164
|
-
}
|
|
165
|
-
|
|
166
|
-
def analyse_text(text):
|
|
167
|
-
for line in text.split('\n'):
|
|
168
|
-
line = line.strip()
|
|
169
|
-
if not (line.startswith('#') or line.startswith('deb ') or
|
|
170
|
-
line.startswith('deb-src ') or not line):
|
|
171
|
-
return False
|
|
172
|
-
return True
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
class MakefileLexer(Lexer):
|
|
176
|
-
"""
|
|
177
|
-
Lexer for BSD and GNU make extensions (lenient enough to handle both in
|
|
178
|
-
the same file even).
|
|
179
|
-
|
|
180
|
-
*Rewritten in Pygments 0.10.*
|
|
181
|
-
"""
|
|
182
|
-
|
|
183
|
-
name = 'Makefile'
|
|
184
|
-
aliases = ['make', 'makefile', 'mf', 'bsdmake']
|
|
185
|
-
filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
|
|
186
|
-
mimetypes = ['text/x-makefile']
|
|
187
|
-
|
|
188
|
-
r_special = re.compile(r'^(?:'
|
|
189
|
-
# BSD Make
|
|
190
|
-
r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
|
|
191
|
-
# GNU Make
|
|
192
|
-
r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:))(?=\s)')
|
|
193
|
-
r_comment = re.compile(r'^\s*@?#')
|
|
194
|
-
|
|
195
|
-
def get_tokens_unprocessed(self, text):
|
|
196
|
-
ins = []
|
|
197
|
-
lines = text.splitlines(True)
|
|
198
|
-
done = ''
|
|
199
|
-
lex = BaseMakefileLexer(**self.options)
|
|
200
|
-
backslashflag = False
|
|
201
|
-
for line in lines:
|
|
202
|
-
if self.r_special.match(line) or backslashflag:
|
|
203
|
-
ins.append((len(done), [(0, Comment.Preproc, line)]))
|
|
204
|
-
backslashflag = line.strip().endswith('\\')
|
|
205
|
-
elif self.r_comment.match(line):
|
|
206
|
-
ins.append((len(done), [(0, Comment, line)]))
|
|
207
|
-
else:
|
|
208
|
-
done += line
|
|
209
|
-
for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
|
|
210
|
-
yield item
|
|
211
|
-
|
|
212
|
-
def analyse_text(text):
|
|
213
|
-
# Many makefiles have $(BIG_CAPS) style variables
|
|
214
|
-
if re.search(r'\$\([A-Z_]+\)', text):
|
|
215
|
-
return 0.1
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
class BaseMakefileLexer(RegexLexer):
|
|
219
|
-
"""
|
|
220
|
-
Lexer for simple Makefiles (no preprocessing).
|
|
221
|
-
|
|
222
|
-
.. versionadded:: 0.10
|
|
223
|
-
"""
|
|
224
|
-
|
|
225
|
-
name = 'Base Makefile'
|
|
226
|
-
aliases = ['basemake']
|
|
227
|
-
filenames = []
|
|
228
|
-
mimetypes = []
|
|
229
|
-
|
|
230
|
-
tokens = {
|
|
231
|
-
'root': [
|
|
232
|
-
# recipes (need to allow spaces because of expandtabs)
|
|
233
|
-
(r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
|
|
234
|
-
# special variables
|
|
235
|
-
(r'\$[<@$+%?|*]', Keyword),
|
|
236
|
-
(r'\s+', Text),
|
|
237
|
-
(r'#.*?\n', Comment),
|
|
238
|
-
(r'(export)(\s+)(?=[\w${}\t -]+\n)',
|
|
239
|
-
bygroups(Keyword, Text), 'export'),
|
|
240
|
-
(r'export\s+', Keyword),
|
|
241
|
-
# assignment
|
|
242
|
-
(r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
|
|
243
|
-
bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
|
|
244
|
-
# strings
|
|
245
|
-
(r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
|
|
246
|
-
(r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
|
|
247
|
-
# targets
|
|
248
|
-
(r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
|
|
249
|
-
'block-header'),
|
|
250
|
-
# expansions
|
|
251
|
-
(r'\$\(', Keyword, 'expansion'),
|
|
252
|
-
],
|
|
253
|
-
'expansion': [
|
|
254
|
-
(r'[^$a-zA-Z_)]+', Text),
|
|
255
|
-
(r'[a-zA-Z_]+', Name.Variable),
|
|
256
|
-
(r'\$', Keyword),
|
|
257
|
-
(r'\(', Keyword, '#push'),
|
|
258
|
-
(r'\)', Keyword, '#pop'),
|
|
259
|
-
],
|
|
260
|
-
'export': [
|
|
261
|
-
(r'[\w${}-]+', Name.Variable),
|
|
262
|
-
(r'\n', Text, '#pop'),
|
|
263
|
-
(r'\s+', Text),
|
|
264
|
-
],
|
|
265
|
-
'block-header': [
|
|
266
|
-
(r'[,|]', Punctuation),
|
|
267
|
-
(r'#.*?\n', Comment, '#pop'),
|
|
268
|
-
(r'\\\n', Text), # line continuation
|
|
269
|
-
(r'\$\(', Keyword, 'expansion'),
|
|
270
|
-
(r'[a-zA-Z_]+', Name),
|
|
271
|
-
(r'\n', Text, '#pop'),
|
|
272
|
-
(r'.', Text),
|
|
273
|
-
],
|
|
274
|
-
}
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
class DiffLexer(RegexLexer):
|
|
278
|
-
"""
|
|
279
|
-
Lexer for unified or context-style diffs or patches.
|
|
280
|
-
"""
|
|
281
|
-
|
|
282
|
-
name = 'Diff'
|
|
283
|
-
aliases = ['diff', 'udiff']
|
|
284
|
-
filenames = ['*.diff', '*.patch']
|
|
285
|
-
mimetypes = ['text/x-diff', 'text/x-patch']
|
|
286
|
-
|
|
287
|
-
tokens = {
|
|
288
|
-
'root': [
|
|
289
|
-
(r' .*\n', Text),
|
|
290
|
-
(r'\+.*\n', Generic.Inserted),
|
|
291
|
-
(r'-.*\n', Generic.Deleted),
|
|
292
|
-
(r'!.*\n', Generic.Strong),
|
|
293
|
-
(r'@.*\n', Generic.Subheading),
|
|
294
|
-
(r'([Ii]ndex|diff).*\n', Generic.Heading),
|
|
295
|
-
(r'=.*\n', Generic.Heading),
|
|
296
|
-
(r'.*\n', Text),
|
|
297
|
-
]
|
|
298
|
-
}
|
|
299
|
-
|
|
300
|
-
def analyse_text(text):
|
|
301
|
-
if text[:7] == 'Index: ':
|
|
302
|
-
return True
|
|
303
|
-
if text[:5] == 'diff ':
|
|
304
|
-
return True
|
|
305
|
-
if text[:4] == '--- ':
|
|
306
|
-
return 0.9
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
DPATCH_KEYWORDS = ['hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
|
|
310
|
-
'replace']
|
|
311
|
-
|
|
312
|
-
class DarcsPatchLexer(RegexLexer):
|
|
313
|
-
"""
|
|
314
|
-
DarcsPatchLexer is a lexer for the various versions of the darcs patch
|
|
315
|
-
format. Examples of this format are derived by commands such as
|
|
316
|
-
``darcs annotate --patch`` and ``darcs send``.
|
|
317
|
-
|
|
318
|
-
.. versionadded:: 0.10
|
|
319
|
-
"""
|
|
320
|
-
name = 'Darcs Patch'
|
|
321
|
-
aliases = ['dpatch']
|
|
322
|
-
filenames = ['*.dpatch', '*.darcspatch']
|
|
323
|
-
|
|
324
|
-
tokens = {
|
|
325
|
-
'root': [
|
|
326
|
-
(r'<', Operator),
|
|
327
|
-
(r'>', Operator),
|
|
328
|
-
(r'{', Operator),
|
|
329
|
-
(r'}', Operator),
|
|
330
|
-
(r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
|
|
331
|
-
bygroups(Operator, Keyword, Name, Text, Name, Operator,
|
|
332
|
-
Literal.Date, Text, Operator)),
|
|
333
|
-
(r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
|
|
334
|
-
bygroups(Operator, Keyword, Name, Text, Name, Operator,
|
|
335
|
-
Literal.Date, Text), 'comment'),
|
|
336
|
-
(r'New patches:', Generic.Heading),
|
|
337
|
-
(r'Context:', Generic.Heading),
|
|
338
|
-
(r'Patch bundle hash:', Generic.Heading),
|
|
339
|
-
(r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
|
|
340
|
-
bygroups(Text, Keyword, Text)),
|
|
341
|
-
(r'\+', Generic.Inserted, "insert"),
|
|
342
|
-
(r'-', Generic.Deleted, "delete"),
|
|
343
|
-
(r'.*\n', Text),
|
|
344
|
-
],
|
|
345
|
-
'comment': [
|
|
346
|
-
(r'[^\]].*\n', Comment),
|
|
347
|
-
(r'\]', Operator, "#pop"),
|
|
348
|
-
],
|
|
349
|
-
'specialText': [ # darcs add [_CODE_] special operators for clarity
|
|
350
|
-
(r'\n', Text, "#pop"), # line-based
|
|
351
|
-
(r'\[_[^_]*_]', Operator),
|
|
352
|
-
],
|
|
353
|
-
'insert': [
|
|
354
|
-
include('specialText'),
|
|
355
|
-
(r'\[', Generic.Inserted),
|
|
356
|
-
(r'[^\n\[]+', Generic.Inserted),
|
|
357
|
-
],
|
|
358
|
-
'delete': [
|
|
359
|
-
include('specialText'),
|
|
360
|
-
(r'\[', Generic.Deleted),
|
|
361
|
-
(r'[^\n\[]+', Generic.Deleted),
|
|
362
|
-
],
|
|
363
|
-
}
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
class IrcLogsLexer(RegexLexer):
|
|
367
|
-
"""
|
|
368
|
-
Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
|
|
369
|
-
"""
|
|
370
|
-
|
|
371
|
-
name = 'IRC logs'
|
|
372
|
-
aliases = ['irc']
|
|
373
|
-
filenames = ['*.weechatlog']
|
|
374
|
-
mimetypes = ['text/x-irclog']
|
|
375
|
-
|
|
376
|
-
flags = re.VERBOSE | re.MULTILINE
|
|
377
|
-
timestamp = r"""
|
|
378
|
-
(
|
|
379
|
-
# irssi / xchat and others
|
|
380
|
-
(?: \[|\()? # Opening bracket or paren for the timestamp
|
|
381
|
-
(?: # Timestamp
|
|
382
|
-
(?: (?:\d{1,4} [-/]?)+ # Date as - or /-separated groups of digits
|
|
383
|
-
[T ])? # Date/time separator: T or space
|
|
384
|
-
(?: \d?\d [:.]?)+ # Time as :/.-separated groups of 1 or 2 digits
|
|
385
|
-
)
|
|
386
|
-
(?: \]|\))?\s+ # Closing bracket or paren for the timestamp
|
|
387
|
-
|
|
|
388
|
-
# weechat
|
|
389
|
-
\d{4}\s\w{3}\s\d{2}\s # Date
|
|
390
|
-
\d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
|
|
391
|
-
|
|
|
392
|
-
# xchat
|
|
393
|
-
\w{3}\s\d{2}\s # Date
|
|
394
|
-
\d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
|
|
395
|
-
)?
|
|
396
|
-
"""
|
|
397
|
-
tokens = {
|
|
398
|
-
'root': [
|
|
399
|
-
# log start/end
|
|
400
|
-
(r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
|
|
401
|
-
# hack
|
|
402
|
-
("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
|
|
403
|
-
# normal msgs
|
|
404
|
-
("^" + timestamp + r"""
|
|
405
|
-
(\s*<.*?>\s*) # Nick """,
|
|
406
|
-
bygroups(Comment.Preproc, Name.Tag), 'msg'),
|
|
407
|
-
# /me msgs
|
|
408
|
-
("^" + timestamp + r"""
|
|
409
|
-
(\s*[*]\s+) # Star
|
|
410
|
-
(\S+\s+.*?\n) # Nick + rest of message """,
|
|
411
|
-
bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
|
|
412
|
-
# join/part msgs
|
|
413
|
-
("^" + timestamp + r"""
|
|
414
|
-
(\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
|
|
415
|
-
(\S+\s+) # Nick + Space
|
|
416
|
-
(.*?\n) # Rest of message """,
|
|
417
|
-
bygroups(Comment.Preproc, Keyword, String, Comment)),
|
|
418
|
-
(r"^.*?\n", Text),
|
|
419
|
-
],
|
|
420
|
-
'msg': [
|
|
421
|
-
(r"\S+:(?!//)", Name.Attribute), # Prefix
|
|
422
|
-
(r".*\n", Text, '#pop'),
|
|
423
|
-
],
|
|
424
|
-
}
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
class BBCodeLexer(RegexLexer):
    """
    A lexer that highlights BBCode(-like) syntax.

    .. versionadded:: 0.6
    """

    name = 'BBCode'
    aliases = ['bbcode']
    mimetypes = ['text/x-bbcode']

    tokens = {
        'root': [
            # Plain text: everything up to the next opening bracket.
            (r'[^[]+', Text),
            # tag/end tag begin
            (r'\[/?\w+', Keyword, 'tag'),
            # stray bracket
            (r'\[', Text),
        ],
        # Inside a [tag ...], until the closing ']'.
        'tag': [
            (r'\s+', Text),
            # attribute with value
            (r'(\w+)(=)("?[^\s"\]]+"?)',
             bygroups(Name.Attribute, Operator, String)),
            # tag argument (a la [color=green])
            (r'(=)("?[^\s"\]]+"?)',
             bygroups(Operator, String)),
            # tag end
            (r'\]', Keyword, '#pop'),
        ],
    }
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
class TexLexer(RegexLexer):
    """
    Lexer for the TeX and LaTeX typesetting languages.
    """

    name = 'TeX'
    aliases = ['tex', 'latex']
    filenames = ['*.tex', '*.aux', '*.toc']
    mimetypes = ['text/x-tex', 'text/x-latex']

    tokens = {
        # Rules shared by text and math modes via include().
        'general': [
            (r'%.*?\n', Comment),
            (r'[{}]', Name.Builtin),
            (r'[&_^]', Name.Builtin),
        ],
        'root': [
            # Math-mode delimiters push the matching math state.
            (r'\\\[', String.Backtick, 'displaymath'),
            (r'\\\(', String, 'inlinemath'),
            (r'\$\$', String.Backtick, 'displaymath'),
            (r'\$', String, 'inlinemath'),
            # A control sequence: backslash + letters, or a single char.
            (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
            include('general'),
            (r'[^\\$%&_^{}]+', Text),
        ],
        # Shared math rules; entered only via the two states below.
        'math': [
            (r'\\([a-zA-Z]+|.)', Name.Variable),
            include('general'),
            (r'[0-9]+', Number),
            (r'[-=!+*/()\[\]]', Operator),
            (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
        ],
        'inlinemath': [
            (r'\\\)', String, '#pop'),
            (r'\$', String, '#pop'),
            include('math'),
        ],
        'displaymath': [
            (r'\\\]', String, '#pop'),
            (r'\$\$', String, '#pop'),
            (r'\$', Name.Builtin),
            include('math'),
        ],
        # After a control sequence: optional [...] argument and star form.
        'command': [
            (r'\[.*?\]', Name.Attribute),
            (r'\*', Keyword),
            default('#pop'),
        ],
    }

    def analyse_text(text):
        """Return True when the text begins with a common (La)TeX preamble
        command; otherwise return None (no opinion)."""
        # str.startswith accepts a tuple of prefixes, checking all of them
        # in one call -- identical behavior to comparing each slice in a
        # loop, just the idiomatic form.
        if text.startswith(("\\documentclass", "\\input",
                            "\\documentstyle", "\\relax")):
            return True
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
class GroffLexer(RegexLexer):
    """
    Lexer for the (g)roff typesetting language, supporting groff
    extensions. Mainly useful for highlighting manpage sources.

    .. versionadded:: 0.6
    """

    name = 'Groff'
    aliases = ['groff', 'nroff', 'man']
    filenames = ['*.[1234567]', '*.man']
    mimetypes = ['application/x-troff', 'text/troff']

    tokens = {
        'root': [
            # A request line: '.' followed by the request name.
            (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
            (r'\.', Punctuation, 'request'),
            # Regular characters, slurp till we find a backslash or newline
            (r'[^\\\n]*', Text, 'textline'),
        ],
        'textline': [
            include('escapes'),
            (r'[^\\\n]+', Text),
            (r'\n', Text, '#pop'),
        ],
        'escapes': [
            # groff has many ways to write escapes.
            (r'\\"[^\n]*', Comment),
            (r'\\[fn]\w', String.Escape),
            (r'\\\(.{2}', String.Escape),
            (r'\\.\[.*\]', String.Escape),
            (r'\\.', String.Escape),
            # Line continuation switches into request mode.
            (r'\\\n', Text, 'request'),
        ],
        'request': [
            (r'\n', Text, '#pop'),
            include('escapes'),
            (r'"[^\n"]+"', String.Double),
            (r'\d+', Number),
            (r'\S+', String),
            (r'\s+', Text),
        ],
    }

    def analyse_text(text):
        """Score the likelihood that *text* is a (g)roff document, based on
        its first line. Returns False, True, 0.9 or None (no opinion)."""
        if text[:1] != '.':
            return False
        if text[:3] == '.\\"':
            return True
        if text[:4] == '.TH ':
            return True
        # Use a slice (text[3:4]) instead of indexing (text[3]) so that
        # inputs shorter than four characters cannot raise IndexError:
        # ''.isspace() is simply False.
        if text[1:3].isalnum() and text[3:4].isspace():
            return 0.9
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
class ApacheConfLexer(RegexLexer):
    """
    Lexer for configuration files following the Apache config file
    format.

    .. versionadded:: 0.6
    """

    name = 'ApacheConf'
    aliases = ['apacheconf', 'aconf', 'apache']
    filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
    mimetypes = ['text/x-apacheconf']
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'(#.*?)$', Comment),
            # Angle-bracket section tags: <Name args>
            (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
             bygroups(Name.Tag, Text, String, Name.Tag)),
            # A directive name followed by whitespace starts its value.
            (r'([a-z]\w*)(\s+)',
             bygroups(Name.Builtin, Text), 'value'),
            (r'\.+', Text),
        ],
        # Everything after a directive name, until end of line.
        'value': [
            (r'$', Text, '#pop'),
            (r'[^\S\n]+', Text),
            # Dotted-quad address with optional /prefix length.
            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
            (r'\d+', Number),
            (r'/([a-z0-9][\w./-]+)', String.Other),
            (r'(on|off|none|any|all|double|email|dns|min|minimal|'
             r'os|productonly|full|emerg|alert|crit|error|warn|'
             r'notice|info|debug|registry|script|inetd|standalone|'
             r'user|group)\b', Keyword),
            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
            (r'[^\s"]+', Text)
        ]
    }
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
class MoinWikiLexer(RegexLexer):
    """
    For MoinMoin (and Trac) Wiki markup.

    .. versionadded:: 0.7
    """

    name = 'MoinMoin/Trac Wiki markup'
    aliases = ['trac-wiki', 'moin']
    filenames = []
    mimetypes = ['text/x-trac-wiki']
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            # Processing-instruction / comment line.
            (r'^#.*$', Comment),
            (r'(!)(\S+)', bygroups(Keyword, Text)),  # Ignore-next
            # Titles
            (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
             bygroups(Generic.Heading, using(this), Generic.Heading, String)),
            # Literal code blocks, with optional shebang
            (r'({{{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
            (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment),  # Formatting
            # Lists
            (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
            (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
            # Other Formatting
            (r'\[\[\w+.*?\]\]', Keyword),  # Macro
            (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
             bygroups(Keyword, String, Keyword)),  # Link
            (r'^----+$', Keyword),  # Horizontal rules
            (r'[^\n\'\[{!_~^,|]+', Text),
            (r'\n', Text),
            (r'.', Text),
        ],
        # Inside a {{{ ... }}} literal block.
        'codeblock': [
            (r'}}}', Name.Builtin, '#pop'),
            # these blocks are allowed to be nested in Trac, but not MoinMoin
            (r'{{{', Text, '#push'),
            (r'[^{}]+', Comment.Preproc),  # slurp boring text
            (r'.', Comment.Preproc),  # allow loose { or }
        ],
    }
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
class RstLexer(RegexLexer):
    """
    For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.

    .. versionadded:: 0.7

    Additional options accepted:

    `handlecodeblocks`
        Highlight the contents of ``.. sourcecode:: language``,
        ``.. code:: language`` and ``.. code-block:: language``
        directives with a lexer for the given language (default:
        ``True``).

        .. versionadded:: 0.8
    """
    name = 'reStructuredText'
    aliases = ['rst', 'rest', 'restructuredtext']
    filenames = ['*.rst', '*.rest']
    mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
    flags = re.MULTILINE

    def _handle_sourcecode(self, match):
        """Callback for the sourcecode-directive rule: emit the directive
        header tokens (groups 1-7), then highlight the indented body
        (groups 8-11) with the named sub-lexer when one is found."""
        from pygments.lexers import get_lexer_by_name

        # section header
        yield match.start(1), Punctuation, match.group(1)
        yield match.start(2), Text, match.group(2)
        yield match.start(3), Operator.Word, match.group(3)
        yield match.start(4), Punctuation, match.group(4)
        yield match.start(5), Text, match.group(5)
        yield match.start(6), Keyword, match.group(6)
        yield match.start(7), Text, match.group(7)

        # lookup lexer if wanted and existing
        lexer = None
        if self.handlecodeblocks:
            try:
                lexer = get_lexer_by_name(match.group(6).strip())
            except ClassNotFound:
                pass
        indention = match.group(8)
        indention_size = len(indention)
        code = (indention + match.group(9) + match.group(10) + match.group(11))

        # no lexer for this language. handle it like it was a code block
        if lexer is None:
            yield match.start(8), String, code
            return

        # highlight the lines with the lexer.
        # Strip the common indentation before sub-lexing, and re-insert it
        # afterwards via do_insertions so offsets stay correct.
        ins = []
        codelines = code.splitlines(True)
        code = ''
        for line in codelines:
            if len(line) > indention_size:
                ins.append((len(code), [(0, Text, line[:indention_size])]))
                code += line[indention_size:]
            else:
                code += line
        for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
            yield item

    # from docutils.parsers.rst.states
    closers = u'\'")]}>\u2019\u201d\xbb!?'
    unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
    end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
                         % (re.escape(unicode_delimiters),
                            re.escape(closers)))

    tokens = {
        'root': [
            # Heading with overline
            (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
             r'(.+)(\n)(\1)(\n)',
             bygroups(Generic.Heading, Text, Generic.Heading,
                      Text, Generic.Heading, Text)),
            # Plain heading
            (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
             r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
             bygroups(Generic.Heading, Text, Generic.Heading, Text)),
            # Bulleted lists
            (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
             bygroups(Text, Number, using(this, state='inline'))),
            # Numbered lists
            (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
             bygroups(Text, Number, using(this, state='inline'))),
            (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
             bygroups(Text, Number, using(this, state='inline'))),
            # Numbered, but keep words at BOL from becoming lists
            (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
             bygroups(Text, Number, using(this, state='inline'))),
            (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
             bygroups(Text, Number, using(this, state='inline'))),
            # Line blocks
            (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
             bygroups(Text, Operator, using(this, state='inline'))),
            # Sourcecode directives
            (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
             r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
             _handle_sourcecode),
            # A directive
            (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
             bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
                      using(this, state='inline'))),
            # A reference target
            (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
             bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
            # A footnote/citation target
            (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
             bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
            # A substitution def
            (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
             bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
                      Punctuation, Text, using(this, state='inline'))),
            # Comments
            (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
            # Field list
            (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)),
            (r'^( *)(:.*?:)([ \t]+)(.*?)$',
             bygroups(Text, Name.Class, Text, Name.Function)),
            # Definition list
            (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
             bygroups(using(this, state='inline'), using(this, state='inline'))),
            # Code blocks
            (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
             bygroups(String.Escape, Text, String, String, Text, String)),
            include('inline'),
        ],
        'inline': [
            (r'\\.', Text),  # escape
            (r'``', String, 'literal'),  # code
            (r'(`.+?)(<.+?>)(`__?)',  # reference with inline target
             bygroups(String, String.Interpol, String)),
            (r'`.+?`__?', String),  # reference
            (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
             bygroups(Name.Variable, Name.Attribute)),  # role
            (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
             bygroups(Name.Attribute, Name.Variable)),  # role (content first)
            (r'\*\*.+?\*\*', Generic.Strong),  # Strong emphasis
            (r'\*.+?\*', Generic.Emph),  # Emphasis
            (r'\[.*?\]_', String),  # Footnote or citation
            (r'<.+?>', Name.Tag),   # Hyperlink
            (r'[^\\\n\[*`:]+', Text),
            (r'.', Text),
        ],
        # Inside ``...`` inline literals.
        'literal': [
            (r'[^`]+', String),
            (r'``' + end_string_suffix, String, '#pop'),
            (r'`', String),
        ]
    }

    def __init__(self, **options):
        self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
        RegexLexer.__init__(self, **options)

    def analyse_text(text):
        if text[:2] == '..' and text[2:3] != '.':
            return 0.3
        p1 = text.find("\n")
        p2 = text.find("\n", p1 + 1)
        if (p2 > -1 and              # has two lines
                p1 * 2 + 1 == p2 and     # they are the same length
                text[p1+1] in '-=' and   # the next line both starts and ends with
                text[p1+1] == text[p2-1]):  # ...a sufficiently high header
            return 0.5
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
class VimLexer(RegexLexer):
    """
    Lexer for VimL script files.

    .. versionadded:: 0.8
    """
    name = 'VimL'
    aliases = ['vim']
    filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
                 '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
    mimetypes = ['text/x-vim']
    flags = re.MULTILINE

    # Matches every abbreviation of ':python' down to ':py'.
    _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'

    tokens = {
        'root': [
            # Heredoc-style embedded Python: ':py << EOF ... EOF'.
            (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
             bygroups(using(this), Keyword, Text, Operator, Text, Text,
                      using(PythonLexer), Text)),
            # Single-line embedded Python: ':py <statement>'.
            (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
             bygroups(using(this), Keyword, Text, using(PythonLexer))),

            (r'^\s*".*', Comment),

            (r'[ \t]+', Text),
            # TODO: regexes can have other delims
            (r'/(\\\\|\\/|[^\n/])*/', String.Regex),
            (r'"(\\\\|\\"|[^\n"])*"', String.Double),
            (r"'(''|[^\n'])*'", String.Single),

            # Who decided that doublequote was a good comment character??
            (r'(?<=\s)"[^\-:.%#=*].*', Comment),
            (r'-?\d+', Number),
            (r'#[0-9a-f]{6}', Number.Hex),
            (r'^:', Punctuation),
            (r'[()<>+=!|,~-]', Punctuation),  # Inexact list. Looks decent.
            (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
             Keyword),
            (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
            (r'\b\w+\b', Name.Other),  # These are postprocessed below
            (r'.', Text),
        ],
    }

    def __init__(self, **options):
        # Sorted (abbreviation, full-name) tables used by is_in() below.
        from pygments.lexers._vimbuiltins import command, option, auto
        self._cmd = command
        self._opt = option
        self._aut = auto

        RegexLexer.__init__(self, **options)

    def is_in(self, w, mapping):
        r"""
        It's kind of difficult to decide if something might be a keyword
        in VimL because it allows you to abbreviate them.  In fact,
        'ab[breviate]' is a good example.  :ab, :abbre, or :abbreviate are
        valid ways to call it so rather than making really awful regexps
        like::

            \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b

        we match `\b\w+\b` and then call is_in() on those tokens.  See
        `scripts/get_vimkw.py` for how the lists are extracted.
        """
        # Binary-search the sorted mapping; a candidate matches when it
        # starts with the minimal abbreviation and is itself a prefix of
        # the full name.  Check the entries on both sides of the
        # insertion point.
        p = bisect(mapping, (w,))
        if p > 0:
            if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
                    mapping[p-1][1][:len(w)] == w:
                return True
        if p < len(mapping):
            return mapping[p][0] == w[:len(mapping[p][0])] and \
                mapping[p][1][:len(w)] == w
        return False

    def get_tokens_unprocessed(self, text):
        """Post-process Name.Other tokens into Keyword / Name.Builtin /
        Text depending on the builtin tables loaded in __init__."""
        # TODO: builtins are only subsequent tokens on lines
        #       and 'keywords' only happen at the beginning except
        #       for :au ones
        for index, token, value in \
                RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name.Other:
                if self.is_in(value, self._cmd):
                    yield index, Keyword, value
                elif self.is_in(value, self._opt) or \
                        self.is_in(value, self._aut):
                    yield index, Name.Builtin, value
                else:
                    yield index, Text, value
            else:
                yield index, token, value
|
|
916
|
-
|
|
917
|
-
|
|
918
|
-
class GettextLexer(RegexLexer):
    """
    Lexer for Gettext catalog files.

    .. versionadded:: 0.9
    """
    name = 'Gettext Catalog'
    aliases = ['pot', 'po']
    filenames = ['*.pot', '*.po']
    mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']

    tokens = {
        'root': [
            # '#,' flag comments and '#:' source-reference comments.
            (r'^#,\s.*?$', Keyword.Type),
            (r'^#:\s.*?$', Keyword.Declaration),
            #(r'^#$', Comment),
            # All other '#' comment variants.
            (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
            # Header entries, e.g. "Content-Type: ...".
            (r'^(")([A-Za-z-]+:)(.*")$',
             bygroups(String, Name.Property, String)),
            # Continuation string lines.
            (r'^".*"$', String),
            (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$',
             bygroups(Name.Variable, Text, String)),
            # Plural forms: msgstr[0] "..."
            (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
             bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
        ]
    }
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
class SquidConfLexer(RegexLexer):
    """
    Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.

    .. versionadded:: 0.9
    """

    name = 'SquidConf'
    aliases = ['squidconf', 'squid.conf', 'squid']
    filenames = ['squid.conf']
    mimetypes = ['text/x-squidconf']
    flags = re.IGNORECASE

    # Top-level configuration directives.
    keywords = [
        "access_log", "acl", "always_direct", "announce_host",
        "announce_period", "announce_port", "announce_to", "anonymize_headers",
        "append_domain", "as_whois_server", "auth_param_basic",
        "authenticate_children", "authenticate_program", "authenticate_ttl",
        "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
        "cache_dir", "cache_dns_program", "cache_effective_group",
        "cache_effective_user", "cache_host", "cache_host_acl",
        "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
        "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
        "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
        "cache_stoplist_pattern", "cache_store_log", "cache_swap",
        "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
        "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
        "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
        "delay_initial_bucket_level", "delay_parameters", "delay_pools",
        "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
        "dns_testnames", "emulate_httpd_log", "err_html_text",
        "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
        "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
        "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
        "header_replace", "hierarchy_stoplist", "high_response_time_warning",
        "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
        "http_anonymizer", "httpd_accel", "httpd_accel_host",
        "httpd_accel_port", "httpd_accel_uses_host_header",
        "httpd_accel_with_proxy", "http_port", "http_reply_access",
        "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
        "ident_lookup", "ident_lookup_access", "ident_timeout",
        "incoming_http_average", "incoming_icp_average", "inside_firewall",
        "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
        "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
        "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
        "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
        "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
        "memory_pools_limit", "memory_replacement_policy", "mime_table",
        "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
        "minimum_object_size", "minimum_retry_timeout", "miss_access",
        "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
        "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
        "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
        "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
        "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
        "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min",
        "quick_abort_pct", "range_offset_limit", "read_timeout",
        "redirect_children", "redirect_program",
        "redirect_rewrites_host_header", "reference_age", "reference_age",
        "refresh_pattern", "reload_into_ims", "request_body_max_size",
        "request_size", "request_timeout", "shutdown_lifetime",
        "single_parent_bypass", "siteselect_timeout", "snmp_access",
        "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
        "store_avg_object_size", "store_objects_per_bucket",
        "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
        "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
        "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
        "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
        "unlinkd_program", "uri_whitespace", "useragent_log",
        "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
    ]

    # Option words that may follow a directive.
    opts = [
        "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
        "multicast-responder", "on", "off", "all", "deny", "allow", "via",
        "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
        "credentialsttl", "none", "disable", "offline_toggle", "diskd",
    ]

    # Cache-manager actions.
    actions = [
        "shutdown", "info", "parameter", "server_list", "client_list",
        r'squid\.conf',
    ]

    # Cache-manager "stats/..." sub-actions.
    actions_stats = [
        "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
        "redirector", "io", "reply_headers", "filedescriptors", "netdb",
    ]

    # Cache-manager "log/...=" sub-actions.
    actions_log = ["status", "enable", "disable", "clear"]

    acls = [
        "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
        "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
        "dst", "time", "dstdomain", "ident", "snmp_community",
    ]

    # Matches IPv4 (decimal, hex or octal octets) and IPv6 addresses.
    ip_re = (
        r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
        r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
        r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
        r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
        r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
        r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
        r'[1-9]?\d)){3}))'
    )

    def makelistre(words):
        """Build a whole-word alternation regex from a list of literals.

        (The parameter used to be called ``list``, shadowing the builtin.)
        """
        return r'\b(?:' + '|'.join(words) + r')\b'

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            (r'#', Comment, 'comment'),
            (makelistre(keywords), Keyword),
            (makelistre(opts), Name.Constant),
            # Actions
            (makelistre(actions), String),
            # BUGFIX: these two rules previously reused ``actions``, leaving
            # the dedicated actions_stats / actions_log tables unused.
            (r'stats/' + makelistre(actions_stats), String),
            (r'log/' + makelistre(actions_log) + r'=', String),
            (makelistre(acls), Keyword),
            # Address, optionally followed by /netmask-or-prefix.
            (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
            (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
            (r'\S+', Text),
        ],
        'comment': [
            # Squid's annotated default config marks documentation with TAG:.
            (r'\s*TAG:.*', String.Escape, '#pop'),
            (r'.*', Comment, '#pop'),
        ],
    }
|
|
1076
|
-
|
|
1077
|
-
|
|
1078
|
-
class DebianControlLexer(RegexLexer):
    """
    Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.

    .. versionadded:: 0.9
    """
    name = 'Debian Control file'
    aliases = ['control', 'debcontrol']
    filenames = ['control']

    tokens = {
        'root': [
            (r'^(Description)', Keyword, 'description'),
            (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
            (r'^((Build-)?Depends)', Keyword, 'depends'),
            (r'^((?:Python-)?Version)(:\s*)(\S+)$',
             bygroups(Keyword, Text, Number)),
            (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
             bygroups(Keyword, Text, Number)),
            (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
             bygroups(Keyword, Text, Number)),
            # Any other "Field: value" line.
            (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
             bygroups(Keyword, Whitespace, String)),
        ],
        'maintainer': [
            (r'<[^>]+>', Generic.Strong),
            (r'<[^>]+>$', Generic.Strong, '#pop'),
            (r',\n?', Text),
            (r'.', Text),
        ],
        'description': [
            (r'(.*)(Homepage)(: )(\S+)',
             bygroups(Text, String, Name, Name.Class)),
            (r':.*\n', Generic.Strong),
            (r' .*\n', Text),
            # Leave the state when none of the above matches.  default()
            # replaces the deprecated empty-string rule ('', Text, '#pop')
            # and matches the usage in TexLexer's 'command' state.
            default('#pop'),
        ],
        'depends': [
            (r':\s*', Text),
            # Substitution variables like ${shlibs:Depends}.
            (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
            (r'\(', Text, 'depend_vers'),
            (r',', Text),
            (r'\|', Operator),
            (r'[\s]+', Text),
            (r'[}\)]\s*$', Text, '#pop'),
            (r'}', Text),
            (r'[^,]$', Name.Function, '#pop'),
            (r'([\+\.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
            (r'\[.*?\]', Name.Entity),
        ],
        # Inside a "(>= 1.2)" version restriction.
        'depend_vers': [
            (r'\),', Text, '#pop'),
            (r'\)[^,]', Text, '#pop:2'),
            (r'([><=]+)(\s*)([^\)]+)', bygroups(Operator, Text, Number))
        ]
    }
|
|
1134
|
-
|
|
1135
|
-
|
|
1136
|
-
class YamlLexerContext(LexerContext):
    """Lexer context extended with the YAML lexer's indentation state."""

    def __init__(self, *args, **kwargs):
        super(YamlLexerContext, self).__init__(*args, **kwargs)
        # Current indentation level; -1 means none established yet.
        self.indent = -1
        # Stack of enclosing indentation levels to return to.
        self.indent_stack = []
        # Candidate indentation for the next nested block.
        self.next_indent = 0
        # Explicit indentation given for a block scalar, if any.
        self.block_scalar_indent = None
|
|
1145
|
-
|
|
1146
|
-
|
|
1147
|
-
class YamlLexer(ExtendedRegexLexer):
|
|
1148
|
-
"""
|
|
1149
|
-
Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
|
|
1150
|
-
language.
|
|
1151
|
-
|
|
1152
|
-
.. versionadded:: 0.11
|
|
1153
|
-
"""
|
|
1154
|
-
|
|
1155
|
-
name = 'YAML'
|
|
1156
|
-
aliases = ['yaml']
|
|
1157
|
-
filenames = ['*.yaml', '*.yml']
|
|
1158
|
-
mimetypes = ['text/x-yaml']
|
|
1159
|
-
|
|
1160
|
-
|
|
1161
|
-
def something(token_class):
|
|
1162
|
-
"""Do not produce empty tokens."""
|
|
1163
|
-
def callback(lexer, match, context):
|
|
1164
|
-
text = match.group()
|
|
1165
|
-
if not text:
|
|
1166
|
-
return
|
|
1167
|
-
yield match.start(), token_class, text
|
|
1168
|
-
context.pos = match.end()
|
|
1169
|
-
return callback
|
|
1170
|
-
|
|
1171
|
-
def reset_indent(token_class):
|
|
1172
|
-
"""Reset the indentation levels."""
|
|
1173
|
-
def callback(lexer, match, context):
|
|
1174
|
-
text = match.group()
|
|
1175
|
-
context.indent_stack = []
|
|
1176
|
-
context.indent = -1
|
|
1177
|
-
context.next_indent = 0
|
|
1178
|
-
context.block_scalar_indent = None
|
|
1179
|
-
yield match.start(), token_class, text
|
|
1180
|
-
context.pos = match.end()
|
|
1181
|
-
return callback
|
|
1182
|
-
|
|
1183
|
-
def save_indent(token_class, start=False):
|
|
1184
|
-
"""Save a possible indentation level."""
|
|
1185
|
-
def callback(lexer, match, context):
|
|
1186
|
-
text = match.group()
|
|
1187
|
-
extra = ''
|
|
1188
|
-
if start:
|
|
1189
|
-
context.next_indent = len(text)
|
|
1190
|
-
if context.next_indent < context.indent:
|
|
1191
|
-
while context.next_indent < context.indent:
|
|
1192
|
-
context.indent = context.indent_stack.pop()
|
|
1193
|
-
if context.next_indent > context.indent:
|
|
1194
|
-
extra = text[context.indent:]
|
|
1195
|
-
text = text[:context.indent]
|
|
1196
|
-
else:
|
|
1197
|
-
context.next_indent += len(text)
|
|
1198
|
-
if text:
|
|
1199
|
-
yield match.start(), token_class, text
|
|
1200
|
-
if extra:
|
|
1201
|
-
yield match.start()+len(text), token_class.Error, extra
|
|
1202
|
-
context.pos = match.end()
|
|
1203
|
-
return callback
|
|
1204
|
-
|
|
1205
|
-
def set_indent(token_class, implicit=False):
    """Return a callback that commits the previously saved indentation
    level.  When ``implicit`` is false the matched indicator itself also
    counts toward the next candidate level."""
    def on_match(lexer, match, context):
        indicator = match.group()
        # Adopt the saved level if it is deeper, remembering the old one.
        if context.next_indent > context.indent:
            context.indent_stack.append(context.indent)
            context.indent = context.next_indent
        if not implicit:
            # An explicit indicator consumes columns of its own.
            context.next_indent += len(indicator)
        yield match.start(), token_class, indicator
        context.pos = match.end()
    return on_match
|
|
1217
|
-
|
|
1218
|
-
def set_block_scalar_indent(token_class):
    """Return a callback that records an explicit indentation level for a
    block scalar from its header (e.g. ``|2`` or ``>+3``), or clears it
    when no indentation indicator is given."""
    def on_match(lexer, match, context):
        header = match.group()
        context.block_scalar_indent = None
        if not header:
            return
        digit = match.group(1)
        if digit:
            # The indicator is relative to the current block level
            # (clamped to zero for a top-level scalar).
            context.block_scalar_indent = max(context.indent, 0) + int(digit)
        yield match.start(), token_class, header
        context.pos = match.end()
    return on_match
|
|
1234
|
-
|
|
1235
|
-
def parse_block_scalar_empty_line(indent_token_class, content_token_class):
    """Return a callback for a whitespace-only line in a block scalar:
    spaces up to the scalar's indentation level are indentation, anything
    beyond it counts as scalar content."""
    def on_match(lexer, match, context):
        spaces = match.group()
        cut = context.block_scalar_indent
        if cut is not None and len(spaces) > cut:
            # Split: leading part is indentation, the rest is content.
            yield match.start(), indent_token_class, spaces[:cut]
            yield match.start() + cut, content_token_class, spaces[cut:]
        elif spaces:
            yield match.start(), indent_token_class, spaces
        context.pos = match.end()
    return on_match
|
|
1251
|
-
|
|
1252
|
-
def parse_block_scalar_indent(token_class):
    """Process indentation spaces in a block scalar.

    When no explicit indentation indicator was given, the first sufficiently
    indented line fixes the scalar's level.  A line indented at or below the
    enclosing block level ends the scalar: both the content state and the
    header state are popped from the state stack.
    """
    def callback(lexer, match, context):
        text = match.group()
        if context.block_scalar_indent is None:
            # Level not fixed yet: decide from this line.
            if len(text) <= max(context.indent, 0):
                # Not indented past the block -> the scalar is over;
                # leave 'block-scalar-content' and 'block-scalar-header'.
                context.stack.pop()
                context.stack.pop()
                return
            context.block_scalar_indent = len(text)
        else:
            if len(text) < context.block_scalar_indent:
                # Dedented below the explicit level -> leave the scalar.
                context.stack.pop()
                context.stack.pop()
                return
        if text:
            yield match.start(), token_class, text
        context.pos = match.end()
    return callback
|
|
1271
|
-
|
|
1272
|
-
def parse_plain_scalar_indent(token_class):
    """Return a callback handling indentation spaces inside a plain
    scalar.  A line not indented past the block level terminates the
    scalar (two states are popped); otherwise the spaces are emitted."""
    def on_match(lexer, match, context):
        spaces = match.group()
        if len(spaces) <= context.indent:
            # The plain scalar has ended: leave both scalar states.
            context.stack.pop()
            context.stack.pop()
            return
        if spaces:
            yield match.start(), token_class, spaces
        context.pos = match.end()
    return on_match
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
# State machine for the YAML lexer.  NOTE: rule order within each state is
# significant -- earlier rules win, so do not reorder.
tokens = {
    # the root rules
    'root': [
        # ignored whitespaces
        (r'[ ]+(?=#|$)', Text),
        # line breaks
        (r'\n+', Text),
        # a comment
        (r'#[^\n]*', Comment.Single),
        # the '%YAML' directive
        (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
        # the '%TAG' directive
        (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
        # document start and document end indicators
        (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
         'block-line'),
        # indentation spaces
        (r'[ ]*(?![ \t\n\r\f\v]|$)', save_indent(Text, start=True),
         ('block-line', 'indentation')),
    ],

    # trailing whitespaces after directives or a block scalar indicator
    'ignored-line': [
        # ignored whitespaces
        (r'[ ]+(?=#|$)', Text),
        # a comment
        (r'#[^\n]*', Comment.Single),
        # line break
        (r'\n', Text, '#pop:2'),
    ],

    # the %YAML directive
    'yaml-directive': [
        # the version number
        (r'([ ]+)([0-9]+\.[0-9]+)',
         bygroups(Text, Number), 'ignored-line'),
    ],

    # the %TAG directive
    'tag-directive': [
        # a tag handle and the corresponding prefix
        (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
         r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
         bygroups(Text, Keyword.Type, Text, Keyword.Type),
         'ignored-line'),
    ],

    # block scalar indicators and indentation spaces
    'indentation': [
        # trailing whitespaces are ignored
        (r'[ ]*$', something(Text), '#pop:2'),
        # whitespaces preceding block collection indicators
        (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
        # block collection indicators
        (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
        # the beginning of a block line
        (r'[ ]*', save_indent(Text), '#pop'),
    ],

    # an indented line in the block context
    'block-line': [
        # the line end
        (r'[ ]*(?=#|$)', something(Text), '#pop'),
        # whitespaces separating tokens
        (r'[ ]+', Text),
        # tags, anchors and aliases
        include('descriptors'),
        # block collections and scalars
        include('block-nodes'),
        # flow collections and quoted scalars
        include('flow-nodes'),
        # a plain scalar
        (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
         something(Name.Variable),
         'plain-scalar-in-block-context'),
    ],

    # tags, anchors, aliases
    'descriptors' : [
        # a full-form tag
        (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Keyword.Type),
        # a tag in the form '!', '!suffix' or '!handle!suffix'
        (r'!(?:[0-9A-Za-z_-]+)?'
         r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Keyword.Type),
        # an anchor
        (r'&[0-9A-Za-z_-]+', Name.Label),
        # an alias
        (r'\*[0-9A-Za-z_-]+', Name.Variable),
    ],

    # block collections and scalars
    'block-nodes': [
        # implicit key
        (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
        # literal and folded scalars
        (r'[|>]', Punctuation.Indicator,
         ('block-scalar-content', 'block-scalar-header')),
    ],

    # flow collections and quoted scalars
    'flow-nodes': [
        # a flow sequence
        (r'\[', Punctuation.Indicator, 'flow-sequence'),
        # a flow mapping
        (r'\{', Punctuation.Indicator, 'flow-mapping'),
        # a single-quoted scalar
        (r'\'', String, 'single-quoted-scalar'),
        # a double-quoted scalar
        (r'\"', String, 'double-quoted-scalar'),
    ],

    # the content of a flow collection
    'flow-collection': [
        # whitespaces
        (r'[ ]+', Text),
        # line breaks
        (r'\n+', Text),
        # a comment
        (r'#[^\n]*', Comment.Single),
        # simple indicators
        (r'[?:,]', Punctuation.Indicator),
        # tags, anchors and aliases
        include('descriptors'),
        # nested collections and quoted scalars
        include('flow-nodes'),
        # a plain scalar
        (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
         something(Name.Variable),
         'plain-scalar-in-flow-context'),
    ],

    # a flow sequence indicated by '[' and ']'
    'flow-sequence': [
        # include flow collection rules
        include('flow-collection'),
        # the closing indicator
        (r'\]', Punctuation.Indicator, '#pop'),
    ],

    # a flow mapping indicated by '{' and '}'
    'flow-mapping': [
        # include flow collection rules
        include('flow-collection'),
        # the closing indicator
        (r'\}', Punctuation.Indicator, '#pop'),
    ],

    # block scalar lines
    'block-scalar-content': [
        # line break
        (r'\n', Text),
        # empty line
        (r'^[ ]+$',
         parse_block_scalar_empty_line(Text, Name.Constant)),
        # indentation spaces (we may leave the state here)
        (r'^[ ]*', parse_block_scalar_indent(Text)),
        # line content
        (r'[^\n\r\f\v]+', Name.Constant),
    ],

    # the header of a literal or folded scalar
    'block-scalar-header': [
        # indentation indicator followed by chomping flag
        (r'([1-9])?[+-]?(?=[ ]|$)',
         set_block_scalar_indent(Punctuation.Indicator),
         'ignored-line'),
        # chomping flag followed by indentation indicator
        (r'[+-]?([1-9])?(?=[ ]|$)',
         set_block_scalar_indent(Punctuation.Indicator),
         'ignored-line'),
    ],

    # ignored and regular whitespaces in quoted scalars
    'quoted-scalar-whitespaces': [
        # leading and trailing whitespaces are ignored
        (r'^[ ]+', Text),
        (r'[ ]+$', Text),
        # line breaks are ignored
        (r'\n+', Text),
        # other whitespaces are a part of the value
        (r'[ ]+', Name.Variable),
    ],

    # single-quoted scalars
    'single-quoted-scalar': [
        # include whitespace and line break rules
        include('quoted-scalar-whitespaces'),
        # escaping of the quote character
        (r'\'\'', String.Escape),
        # regular non-whitespace characters
        (r'[^ \t\n\r\f\v\']+', String),
        # the closing quote
        (r'\'', String, '#pop'),
    ],

    # double-quoted scalars
    'double-quoted-scalar': [
        # include whitespace and line break rules
        include('quoted-scalar-whitespaces'),
        # escaping of special characters
        (r'\\[0abt\tn\nvfre "\\N_LP]', String),
        # escape codes
        (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
         String.Escape),
        # regular non-whitespace characters
        (r'[^ \t\n\r\f\v\"\\]+', String),
        # the closing quote
        (r'"', String, '#pop'),
    ],

    # the beginning of a new line while scanning a plain scalar
    'plain-scalar-in-block-context-new-line': [
        # empty lines
        (r'^[ ]+$', Text),
        # line breaks
        (r'\n+', Text),
        # document start and document end indicators
        (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
        # indentation spaces (we may leave the block line state here)
        (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
    ],

    # a plain scalar in the block context
    'plain-scalar-in-block-context': [
        # the scalar ends with the ':' indicator
        (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
        # the scalar ends with whitespaces followed by a comment
        (r'[ ]+(?=#)', Text, '#pop'),
        # trailing whitespaces are ignored
        (r'[ ]+$', Text),
        # line breaks are ignored
        (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
        # other whitespaces are a part of the value
        (r'[ ]+', Literal.Scalar.Plain),
        # regular non-whitespace characters
        (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', Literal.Scalar.Plain),
    ],

    # a plain scalar in the flow context
    'plain-scalar-in-flow-context': [
        # the scalar ends with an indicator character
        (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
        # the scalar ends with a comment
        (r'[ ]+(?=#)', Text, '#pop'),
        # leading and trailing whitespaces are ignored
        (r'^[ ]+', Text),
        (r'[ ]+$', Text),
        # line breaks are ignored
        (r'\n+', Text),
        # other whitespaces are a part of the value
        (r'[ ]+', Name.Variable),
        # regular non-whitespace characters
        (r'[^ \t\n\r\f\v,:?\[\]{}]+', Name.Variable),
    ],

}
|
|
1543
|
-
|
|
1544
|
-
def get_tokens_unprocessed(self, text=None, context=None):
    """Tokenize *text*, supplying a YAML-specific lexer context so the
    indentation-tracking callbacks above have state to work with."""
    if context is None:
        # Fresh context starting at position 0.
        context = YamlLexerContext(text, 0)
    return super(YamlLexer, self).get_tokens_unprocessed(text, context)
|
|
1548
|
-
|
|
1549
|
-
|
|
1550
|
-
class LighttpdConfLexer(RegexLexer):
    """
    Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.

    .. versionadded:: 0.11
    """
    name = 'Lighttpd configuration file'
    aliases = ['lighty', 'lighttpd']
    filenames = []
    mimetypes = ['text/x-lighttpd-conf']

    # NOTE: rule order matters -- the dotted-quad rule must precede the
    # plain integer rule.
    tokens = {
        'root': [
            (r'#.*\n', Comment.Single),
            (r'/\S*', Name),  # pathname
            (r'[a-zA-Z._-]+', Keyword),  # option / directive name
            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),  # IPv4, optional CIDR
            (r'[0-9]+', Number),
            (r'=>|=~|\+=|==|=|\+', Operator),
            (r'\$[A-Z]+', Name.Builtin),  # e.g. $HTTP, $SERVER
            (r'[(){}\[\],]', Punctuation),
            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
            (r'\s+', Text),
        ],

    }
|
|
1576
|
-
|
|
1577
|
-
|
|
1578
|
-
class NginxConfLexer(RegexLexer):
    """
    Lexer for `Nginx <http://nginx.net/>`_ configuration files.

    .. versionadded:: 0.11
    """
    name = 'Nginx configuration file'
    aliases = ['nginx']
    filenames = []
    mimetypes = ['text/x-nginx-conf']

    tokens = {
        'root': [
            (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
            # any other directive name starts a statement
            (r'[^\s;#]+', Keyword, 'stmt'),
            include('base'),
        ],
        # inside a { ... } block: same as root but directive names are
        # namespaced; '}' pops both 'block' and the enclosing 'stmt'
        'block': [
            (r'}', Punctuation, '#pop:2'),
            (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
            include('base'),
        ],
        # the arguments of a directive, up to '{' (opens a block) or ';'
        'stmt': [
            (r'{', Punctuation, 'block'),
            (r';', Punctuation, '#pop'),
            include('base'),
        ],
        # tokens common to all states
        'base': [
            (r'#.*\n', Comment.Single),
            (r'on|off', Name.Constant),
            (r'\$[^\s;#()]+', Name.Variable),
            (r'([a-z0-9.-]+)(:)([0-9]+)',
             bygroups(Name, Punctuation, Number.Integer)),  # host:port
            (r'[a-z-]+/[a-z-+]+', String),  # mimetype
            # (r'[a-zA-Z._-]+', Keyword),
            (r'[0-9]+[km]?\b', Number.Integer),  # sizes, e.g. 8k, 1m
            (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
            (r'[:=~]', Punctuation),
            (r'[^\s;#{}$]+', String),  # catch all
            (r'/[^\s;#]*', Name),  # pathname
            (r'\s+', Text),
            (r'[$;]', Text),  # leftover characters
        ],
    }
|
|
1622
|
-
|
|
1623
|
-
|
|
1624
|
-
class CMakeLexer(RegexLexer):
    """
    Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.

    .. versionadded:: 1.2
    """
    name = 'CMake'
    aliases = ['cmake']
    filenames = ['*.cmake', 'CMakeLists.txt']
    mimetypes = ['text/x-cmake']

    tokens = {
        'root': [
            # NOTE: instead of enumerating every known CMake command, any
            # identifier followed by '(' is highlighted as a command call.
            (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
                                              Punctuation), 'args'),
            include('keywords'),
            include('ws')
        ],
        # everything between the parentheses of a command call
        'args': [
            (r'\(', Punctuation, '#push'),  # nested parentheses
            (r'\)', Punctuation, '#pop'),
            (r'(\${)(.+?)(})', bygroups(Operator, Name.Variable, Operator)),  # ${var}
            (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),  # $<genexpr>
            (r'(?s)".*?"', String.Double),
            (r'\\\S+', String),
            (r'[^\)$"# \t\n]+', String),  # bare argument word
            (r'\n', Text),  # explicitly legal
            include('keywords'),
            include('ws')
        ],
        # placeholder state, currently unused
        'string': [

        ],
        'keywords': [
            (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
             r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
        ],
        'ws': [
            (r'[ \t]+', Text),
            (r'#.*\n', Comment),
        ]
    }

    def analyse_text(text):
        # A cmake_minimum_required() call is a strong hint for CMake input.
        exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$'
        if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
            return 0.8
        return 0.0
|
|
1698
|
-
|
|
1699
|
-
|
|
1700
|
-
class HttpLexer(RegexLexer):
    """
    Lexer for HTTP sessions.

    .. versionadded:: 1.5
    """

    name = 'HTTP'
    aliases = ['http']

    flags = re.DOTALL

    def header_callback(self, match):
        """Tokenize a ``Name: value`` header line; remembers the
        Content-Type so the body can later be sub-lexed."""
        if match.group(1).lower() == 'content-type':
            content_type = match.group(5).strip()
            if ';' in content_type:
                # strip parameters such as '; charset=utf-8'
                content_type = content_type[:content_type.find(';')].strip()
            self.content_type = content_type
        yield match.start(1), Name.Attribute, match.group(1)
        yield match.start(2), Text, match.group(2)
        yield match.start(3), Operator, match.group(3)
        yield match.start(4), Text, match.group(4)
        yield match.start(5), Literal, match.group(5)
        yield match.start(6), Text, match.group(6)

    def continuous_header_callback(self, match):
        """Tokenize a folded (continuation) header line."""
        yield match.start(1), Text, match.group(1)
        yield match.start(2), Literal, match.group(2)
        yield match.start(3), Text, match.group(3)

    def content_callback(self, match):
        """Highlight the message body with a lexer chosen from the
        remembered Content-Type, falling back to plain text."""
        content_type = getattr(self, 'content_type', None)
        content = match.group()
        offset = match.start()
        if content_type:
            from pygments.lexers import get_lexer_for_mimetype
            try:
                lexer = get_lexer_for_mimetype(content_type)
            except ClassNotFound:
                pass
            else:
                # re-base the sub-lexer's offsets onto this match
                for idx, token, value in lexer.get_tokens_unprocessed(content):
                    yield offset + idx, token, value
                return
        yield offset, Text, content

    tokens = {
        'root': [
            # request line: METHOD path HTTP/1.x
            (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
             r'(HTTP)(/)(1\.[01])(\r?\n|$)',
             bygroups(Name.Function, Text, Name.Namespace, Text,
                      Keyword.Reserved, Operator, Number, Text),
             'headers'),
            # status line: HTTP/1.x code reason
            (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
             bygroups(Keyword.Reserved, Operator, Number, Text, Number,
                      Text, Name.Exception, Text),
             'headers'),
        ],
        'headers': [
            (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
            (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
            # blank line separates headers from the body
            (r'\r?\n', Text, 'content')
        ],
        'content': [
            (r'.+', content_callback)
        ]
    }
|
|
1767
|
-
|
|
1768
|
-
|
|
1769
|
-
class PyPyLogLexer(RegexLexer):
    """
    Lexer for PyPy log files.

    .. versionadded:: 1.5
    """
    name = "PyPy Log"
    aliases = ["pypylog", "pypy"]
    filenames = ["*.pypylog"]
    mimetypes = ['application/x-pypylog']

    tokens = {
        "root": [
            # section openers; the matching '...}' lines pop back out
            (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
            (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
            include("extra-stuff"),
        ],
        # the body of a jit-log-* section: JIT trace operations
        "jit-log": [
            (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
            (r"^\+\d+: ", Comment),  # instruction offset
            (r"--end of the loop--", Comment),
            (r"[ifp]\d+", Name),  # int/float/pointer SSA variables
            (r"ptr\d+", Name),
            (r"(\()(\w+(?:\.\w+)?)(\))",
             bygroups(Punctuation, Name.Builtin, Punctuation)),
            (r"[\[\]=,()]", Punctuation),
            (r"(\d+\.\d+|inf|-inf)", Number.Float),
            (r"-?\d+", Number.Integer),
            (r"'.*'", String),
            (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
            (r"<.*?>+", Name.Builtin),
            (r"(label|debug_merge_point|jump|finish)", Name.Class),
            # NOTE: longer operation names must come before their prefixes
            # in this alternation (e.g. int_add_ovf before int_add).
            (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
             r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
             r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
             r"int_is_true|"
             r"uint_floordiv|uint_ge|uint_lt|"
             r"float_add|float_sub|float_mul|float_truediv|float_neg|"
             r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
             r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
             r"cast_int_to_float|cast_float_to_int|"
             r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
             r"virtual_ref|mark_opaque_ptr|"
             r"call_may_force|call_assembler|call_loopinvariant|"
             r"call_release_gil|call_pure|call|"
             r"new_with_vtable|new_array|newstr|newunicode|new|"
             r"arraylen_gc|"
             r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
             r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
             r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
             r"getfield_raw|setfield_gc|setfield_raw|"
             r"strgetitem|strsetitem|strlen|copystrcontent|"
             r"unicodegetitem|unicodesetitem|unicodelen|"
             r"guard_true|guard_false|guard_value|guard_isnull|"
             r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
             r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
             Name.Builtin),
            include("extra-stuff"),
        ],
        "jit-backend-counts": [
            (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
            (r":", Punctuation),
            (r"\d+", Number),
            include("extra-stuff"),
        ],
        # whitespace and comments shared by all states
        "extra-stuff": [
            (r"\s+", Text),
            (r"#.*?$", Comment),
        ],
    }
|
|
1839
|
-
|
|
1840
|
-
|
|
1841
|
-
class HxmlLexer(RegexLexer):
    """
    Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.

    .. versionadded:: 1.6
    """
    name = 'Hxml'
    aliases = ['haxeml', 'hxml']
    filenames = ['*.hxml']

    tokens = {
        'root': [
            # Separator
            (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
            # Compiler switches with one dash
            (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
            # Compiler switches with two dashes
            (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
             r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
            # Targets and other options that take an argument
            (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
             r'cp|cmd)( +)(.+)',
             bygroups(Punctuation, Keyword, Whitespace, String)),
            # Options that take only numerical arguments.
            # Fix: bygroups() was missing the Whitespace entry for group 3,
            # so the version number (group 4) was silently dropped.
            (r'(-)(swf-version)( +)(\d+)',
             bygroups(Punctuation, Keyword, Whitespace, Number.Integer)),
            # An option that defines the size, the fps and the background
            # color of a flash movie
            (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
             bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
                      Punctuation, Number.Integer, Punctuation, Number.Integer,
                      Punctuation, Number.Hex)),
            # Options with two dashes that take arguments
            (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
             r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
            # Single line comment, multiline ones are not allowed.
            (r'#.*', Comment.Single)
        ]
    }
|
|
1880
|
-
|
|
1881
|
-
|
|
1882
|
-
class EbnfLexer(RegexLexer):
    """
    Lexer for `ISO/IEC 14977 EBNF
    <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
    grammars.

    .. versionadded:: 2.0
    """

    name = 'EBNF'
    aliases = ['ebnf']
    filenames = ['*.ebnf']
    mimetypes = ['text/x-ebnf']

    tokens = {
        'root': [
            include('whitespace'),
            include('comment_start'),
            include('identifier'),
            # '=' begins the right-hand side of a production
            (r'=', Operator, 'production'),
        ],
        # the right-hand side of a production, terminated by ';'
        'production': [
            include('whitespace'),
            include('comment_start'),
            include('identifier'),
            (r'"[^"]*"', String.Double),
            (r"'[^']*'", String.Single),
            (r'(\?[^?]*\?)', Name.Entity),  # special sequence ? ... ?
            (r'[\[\]{}(),|]', Punctuation),
            (r'-', Operator),  # exception operator
            (r';', Punctuation, '#pop'),
        ],
        'whitespace': [
            (r'\s+', Text),
        ],
        'comment_start': [
            (r'\(\*', Comment.Multiline, 'comment'),
        ],
        # (* ... *) comments; including 'comment_start' allows nesting
        'comment': [
            (r'[^*)]', Comment.Multiline),
            include('comment_start'),
            (r'\*\)', Comment.Multiline, '#pop'),
            (r'[*)]', Comment.Multiline),
        ],
        # meta-identifiers may contain internal spaces and hyphens
        'identifier': [
            (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword),
        ],
    }
|
|
1930
|
-
|
|
1931
|
-
class TodotxtLexer(RegexLexer):
    """
    Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.

    .. versionadded:: 2.0
    """

    name = 'Todotxt'
    aliases = ['todotxt']
    # *.todotxt is not a standard extension for Todo.txt files; including it
    # makes testing easier, and also makes autodetecting file type easier.
    filenames = ['todo.txt', '*.todotxt']
    mimetypes = ['text/x-todo']

    # Aliases mapping standard token types to Todo.txt format concepts
    CompleteTaskText = Operator  # Chosen to de-emphasize complete tasks
    IncompleteTaskText = Text    # Incomplete tasks should look like plain text

    # Priority should have most emphasis to indicate importance of tasks
    Priority = Generic.Heading
    # Dates should have next most emphasis because time is important
    Date = Generic.Subheading

    # Project and context should have equal weight, and be in different colors
    Project = Generic.Error
    Context = String

    # If tag functionality is added, it should have the same weight as Project
    # and Context, and a different color. Generic.Traceback would work well.

    # Regex patterns for building up rules; dates, priorities, projects, and
    # contexts are all atomic
    # TODO: Make date regex more ISO 8601 compliant
    date_regex = r'\d{4,}-\d{2}-\d{2}'
    priority_regex = r'\([A-Z]\)'
    project_regex = r'\+\S+'
    context_regex = r'@\S+'

    # Compound regex expressions
    complete_one_date_regex = r'(x )(' + date_regex + r')'
    complete_two_date_regex = (complete_one_date_regex + r'( )(' +
                               date_regex + r')')
    priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'

    # Fix: the catch-all patterns below were plain strings containing the
    # invalid escape sequences '\S'/'\s' (DeprecationWarning, and a
    # SyntaxWarning on modern Python); they are now raw strings, which
    # compile to the identical regexes.
    tokens = {
        # Should parse starting at beginning of line; each line is a task
        'root': [
            ## Complete task entry points: two total:
            # 1. Complete task with two dates
            (complete_two_date_regex, bygroups(CompleteTaskText, Date,
                                               CompleteTaskText, Date),
             'complete'),
            # 2. Complete task with one date
            (complete_one_date_regex, bygroups(CompleteTaskText, Date),
             'complete'),

            ## Incomplete task entry points: six total:
            # 1. Priority plus date
            (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
             'incomplete'),
            # 2. Priority only
            (priority_regex, Priority, 'incomplete'),
            # 3. Leading date
            (date_regex, Date, 'incomplete'),
            # 4. Leading context
            (context_regex, Context, 'incomplete'),
            # 5. Leading project
            (project_regex, Project, 'incomplete'),
            # 6. Non-whitespace catch-all
            (r'\S+', IncompleteTaskText, 'incomplete'),
        ],

        # Parse a complete task
        'complete': [
            # Newline indicates end of task, should return to root
            (r'\s*\n', CompleteTaskText, '#pop'),
            # Tokenize contexts and projects
            (context_regex, Context),
            (project_regex, Project),
            # Tokenize non-whitespace text
            (r'\S+', CompleteTaskText),
            # Tokenize whitespace not containing a newline
            (r'\s+', CompleteTaskText),
        ],

        # Parse an incomplete task
        'incomplete': [
            # Newline indicates end of task, should return to root
            (r'\s*\n', IncompleteTaskText, '#pop'),
            # Tokenize contexts and projects
            (context_regex, Context),
            (project_regex, Project),
            # Tokenize non-whitespace text
            (r'\S+', IncompleteTaskText),
            # Tokenize whitespace not containing a newline
            (r'\s+', IncompleteTaskText),
        ],
    }
|
|
2029
|
-
|
|
2030
|
-
|
|
2031
|
-
class DockerLexer(RegexLexer):
    """
    Lexer for `Docker <http://docker.io>`_ configuration files.

    .. versionadded:: 2.0
    """
    name = 'Docker'
    aliases = ['docker', 'dockerfile']
    filenames = ['Dockerfile', '*.docker']
    mimetypes = ['text/x-dockerfile-config']

    # Dockerfile instruction keywords.
    _keywords = (r'(?:FROM|MAINTAINER|RUN|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
                 r'VOLUME|WORKDIR)')

    flags = re.IGNORECASE | re.MULTILINE

    tokens = {
        'root': [
            # ONBUILD prefix followed by a regular instruction
            (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
             bygroups(Name.Keyword, Whitespace, Keyword)),
            (_keywords + r'\b', Keyword),
            (r'#.*', Comment),
            # everything else on the line is highlighted as shell code
            (r'.+', using(BashLexer)),
        ],
    }
|
|
12
|
+
from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
|
|
13
|
+
SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer
|
|
14
|
+
from pygments.lexers.console import PyPyLogLexer
|
|
15
|
+
from pygments.lexers.textedit import VimLexer
|
|
16
|
+
from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
|
|
17
|
+
TexLexer, GroffLexer
|
|
18
|
+
from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
|
|
19
|
+
from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
|
|
20
|
+
from pygments.lexers.haxe import HxmlLexer
|
|
21
|
+
from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
|
|
22
|
+
from pygments.lexers.data import YamlLexer
|
|
23
|
+
from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
|
|
24
|
+
|
|
25
|
+
__all__ = []
|