pygments.rb 1.2.1 → 2.0.0.rc2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (378)
  1. checksums.yaml +5 -5
  2. data/.github/dependabot.yml +13 -0
  3. data/.github/workflows/ci.yml +28 -0
  4. data/.github/workflows/release.yml +24 -0
  5. data/.gitignore +5 -6
  6. data/CHANGELOG.adoc +119 -0
  7. data/Gemfile +3 -1
  8. data/LICENSE +1 -1
  9. data/README.adoc +161 -0
  10. data/Rakefile +10 -21
  11. data/bench.rb +8 -7
  12. data/cache-lexers.rb +3 -2
  13. data/lib/pygments.rb +10 -11
  14. data/lib/pygments/lexer.rb +5 -5
  15. data/lib/pygments/mentos.py +23 -66
  16. data/lib/pygments/popen.rb +152 -204
  17. data/lib/pygments/version.rb +2 -2
  18. data/pygments.rb.gemspec +11 -9
  19. data/test/test_pygments.rb +51 -84
  20. data/vendor/pygments-main/{AUTHORS → Pygments-2.7.3.dist-info/AUTHORS} +21 -3
  21. data/vendor/pygments-main/Pygments-2.7.3.dist-info/INSTALLER +1 -0
  22. data/vendor/pygments-main/{LICENSE → Pygments-2.7.3.dist-info/LICENSE} +1 -1
  23. data/vendor/pygments-main/Pygments-2.7.3.dist-info/METADATA +49 -0
  24. data/vendor/pygments-main/Pygments-2.7.3.dist-info/RECORD +482 -0
  25. data/vendor/pygments-main/Pygments-2.7.3.dist-info/REQUESTED +0 -0
  26. data/vendor/pygments-main/Pygments-2.7.3.dist-info/WHEEL +5 -0
  27. data/vendor/pygments-main/Pygments-2.7.3.dist-info/entry_points.txt +3 -0
  28. data/vendor/pygments-main/Pygments-2.7.3.dist-info/top_level.txt +1 -0
  29. data/vendor/pygments-main/bin/pygmentize +8 -0
  30. data/vendor/pygments-main/pygments/__init__.py +6 -11
  31. data/vendor/pygments-main/pygments/__main__.py +18 -0
  32. data/vendor/pygments-main/pygments/cmdline.py +38 -29
  33. data/vendor/pygments-main/pygments/console.py +6 -9
  34. data/vendor/pygments-main/pygments/filter.py +4 -6
  35. data/vendor/pygments-main/pygments/filters/__init__.py +609 -21
  36. data/vendor/pygments-main/pygments/formatter.py +4 -4
  37. data/vendor/pygments-main/pygments/formatters/__init__.py +9 -8
  38. data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -3
  39. data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -1
  40. data/vendor/pygments-main/pygments/formatters/html.py +223 -135
  41. data/vendor/pygments-main/pygments/formatters/img.py +68 -41
  42. data/vendor/pygments-main/pygments/formatters/irc.py +39 -39
  43. data/vendor/pygments-main/pygments/formatters/latex.py +56 -26
  44. data/vendor/pygments-main/pygments/formatters/other.py +12 -8
  45. data/vendor/pygments-main/pygments/formatters/rtf.py +29 -29
  46. data/vendor/pygments-main/pygments/formatters/svg.py +38 -4
  47. data/vendor/pygments-main/pygments/formatters/terminal.py +25 -31
  48. data/vendor/pygments-main/pygments/formatters/terminal256.py +22 -12
  49. data/vendor/pygments-main/pygments/lexer.py +41 -39
  50. data/vendor/pygments-main/pygments/lexers/__init__.py +342 -0
  51. data/vendor/pygments-main/pygments/lexers/_asy_builtins.py +1645 -0
  52. data/vendor/pygments-main/pygments/lexers/_cl_builtins.py +232 -0
  53. data/vendor/pygments-main/pygments/lexers/_cocoa_builtins.py +71 -0
  54. data/vendor/pygments-main/pygments/lexers/_csound_builtins.py +1725 -0
  55. data/vendor/pygments-main/pygments/lexers/_lasso_builtins.py +5327 -0
  56. data/vendor/pygments-main/pygments/lexers/_lua_builtins.py +293 -0
  57. data/vendor/pygments-main/pygments/lexers/_mapping.py +551 -0
  58. data/vendor/pygments-main/pygments/lexers/_mql_builtins.py +1172 -0
  59. data/vendor/pygments-main/pygments/lexers/_mysql_builtins.py +1282 -0
  60. data/vendor/pygments-main/pygments/lexers/_openedge_builtins.py +2547 -0
  61. data/vendor/pygments-main/pygments/lexers/_php_builtins.py +4753 -0
  62. data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +678 -0
  63. data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +3094 -0
  64. data/vendor/pygments-main/pygments/lexers/_sourcemod_builtins.py +1161 -0
  65. data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +558 -0
  66. data/vendor/pygments-main/pygments/lexers/_stata_builtins.py +421 -0
  67. data/vendor/pygments-main/pygments/lexers/_tsql_builtins.py +1004 -0
  68. data/vendor/pygments-main/pygments/lexers/_usd_builtins.py +113 -0
  69. data/vendor/pygments-main/pygments/lexers/_vbscript_builtins.py +280 -0
  70. data/vendor/pygments-main/pygments/lexers/_vim_builtins.py +1939 -0
  71. data/vendor/pygments-main/pygments/lexers/actionscript.py +245 -0
  72. data/vendor/pygments-main/pygments/lexers/agile.py +24 -0
  73. data/vendor/pygments-main/pygments/lexers/algebra.py +240 -0
  74. data/vendor/pygments-main/pygments/lexers/ambient.py +76 -0
  75. data/vendor/pygments-main/pygments/lexers/ampl.py +87 -0
  76. data/vendor/pygments-main/pygments/lexers/apl.py +101 -0
  77. data/vendor/pygments-main/pygments/lexers/archetype.py +318 -0
  78. data/vendor/pygments-main/pygments/lexers/arrow.py +117 -0
  79. data/vendor/pygments-main/pygments/lexers/asm.py +1005 -0
  80. data/vendor/pygments-main/pygments/lexers/automation.py +374 -0
  81. data/vendor/pygments-main/pygments/lexers/bare.py +104 -0
  82. data/vendor/pygments-main/pygments/lexers/basic.py +662 -0
  83. data/vendor/pygments-main/pygments/lexers/bibtex.py +160 -0
  84. data/vendor/pygments-main/pygments/lexers/boa.py +102 -0
  85. data/vendor/pygments-main/pygments/lexers/business.py +627 -0
  86. data/vendor/pygments-main/pygments/lexers/c_cpp.py +344 -0
  87. data/vendor/pygments-main/pygments/lexers/c_like.py +566 -0
  88. data/vendor/pygments-main/pygments/lexers/capnproto.py +78 -0
  89. data/vendor/pygments-main/pygments/lexers/chapel.py +112 -0
  90. data/vendor/pygments-main/pygments/lexers/clean.py +179 -0
  91. data/vendor/pygments-main/pygments/lexers/compiled.py +34 -0
  92. data/vendor/pygments-main/pygments/lexers/configs.py +984 -0
  93. data/vendor/pygments-main/pygments/lexers/console.py +114 -0
  94. data/vendor/pygments-main/pygments/lexers/crystal.py +392 -0
  95. data/vendor/pygments-main/pygments/lexers/csound.py +467 -0
  96. data/vendor/pygments-main/pygments/lexers/css.py +691 -0
  97. data/vendor/pygments-main/pygments/lexers/d.py +256 -0
  98. data/vendor/pygments-main/pygments/lexers/dalvik.py +125 -0
  99. data/vendor/pygments-main/pygments/lexers/data.py +698 -0
  100. data/vendor/pygments-main/pygments/lexers/devicetree.py +109 -0
  101. data/vendor/pygments-main/pygments/lexers/diff.py +165 -0
  102. data/vendor/pygments-main/pygments/lexers/dotnet.py +707 -0
  103. data/vendor/pygments-main/pygments/lexers/dsls.py +960 -0
  104. data/vendor/pygments-main/pygments/lexers/dylan.py +287 -0
  105. data/vendor/pygments-main/pygments/lexers/ecl.py +139 -0
  106. data/vendor/pygments-main/pygments/lexers/eiffel.py +65 -0
  107. data/vendor/pygments-main/pygments/lexers/elm.py +121 -0
  108. data/vendor/pygments-main/pygments/lexers/email.py +151 -0
  109. data/vendor/pygments-main/pygments/lexers/erlang.py +530 -0
  110. data/vendor/pygments-main/pygments/lexers/esoteric.py +304 -0
  111. data/vendor/pygments-main/pygments/lexers/ezhil.py +77 -0
  112. data/vendor/pygments-main/pygments/lexers/factor.py +344 -0
  113. data/vendor/pygments-main/pygments/lexers/fantom.py +250 -0
  114. data/vendor/pygments-main/pygments/lexers/felix.py +273 -0
  115. data/vendor/pygments-main/pygments/lexers/floscript.py +83 -0
  116. data/vendor/pygments-main/pygments/lexers/forth.py +178 -0
  117. data/vendor/pygments-main/pygments/lexers/fortran.py +206 -0
  118. data/vendor/pygments-main/pygments/lexers/foxpro.py +428 -0
  119. data/vendor/pygments-main/pygments/lexers/freefem.py +898 -0
  120. data/vendor/pygments-main/pygments/lexers/functional.py +21 -0
  121. data/vendor/pygments-main/pygments/lexers/gdscript.py +346 -0
  122. data/vendor/pygments-main/pygments/lexers/go.py +101 -0
  123. data/vendor/pygments-main/pygments/lexers/grammar_notation.py +270 -0
  124. data/vendor/pygments-main/pygments/lexers/graph.py +85 -0
  125. data/vendor/pygments-main/pygments/lexers/graphics.py +800 -0
  126. data/vendor/pygments-main/pygments/lexers/haskell.py +870 -0
  127. data/vendor/pygments-main/pygments/lexers/haxe.py +936 -0
  128. data/vendor/pygments-main/pygments/lexers/hdl.py +472 -0
  129. data/vendor/pygments-main/pygments/lexers/hexdump.py +103 -0
  130. data/vendor/pygments-main/pygments/lexers/html.py +614 -0
  131. data/vendor/pygments-main/pygments/lexers/idl.py +281 -0
  132. data/vendor/pygments-main/pygments/lexers/igor.py +420 -0
  133. data/vendor/pygments-main/pygments/lexers/inferno.py +96 -0
  134. data/vendor/pygments-main/pygments/lexers/installers.py +322 -0
  135. data/vendor/pygments-main/pygments/lexers/int_fiction.py +1368 -0
  136. data/vendor/pygments-main/pygments/lexers/iolang.py +63 -0
  137. data/vendor/pygments-main/pygments/lexers/j.py +146 -0
  138. data/vendor/pygments-main/pygments/lexers/javascript.py +1540 -0
  139. data/vendor/pygments-main/pygments/lexers/julia.py +331 -0
  140. data/vendor/pygments-main/pygments/lexers/jvm.py +1673 -0
  141. data/vendor/pygments-main/pygments/lexers/lisp.py +2699 -0
  142. data/vendor/pygments-main/pygments/lexers/make.py +206 -0
  143. data/vendor/pygments-main/pygments/lexers/markup.py +765 -0
  144. data/vendor/pygments-main/pygments/lexers/math.py +21 -0
  145. data/vendor/pygments-main/pygments/lexers/matlab.py +720 -0
  146. data/vendor/pygments-main/pygments/lexers/mime.py +226 -0
  147. data/vendor/pygments-main/pygments/lexers/ml.py +958 -0
  148. data/vendor/pygments-main/pygments/lexers/modeling.py +366 -0
  149. data/vendor/pygments-main/pygments/lexers/modula2.py +1580 -0
  150. data/vendor/pygments-main/pygments/lexers/monte.py +204 -0
  151. data/vendor/pygments-main/pygments/lexers/mosel.py +448 -0
  152. data/vendor/pygments-main/pygments/lexers/ncl.py +894 -0
  153. data/vendor/pygments-main/pygments/lexers/nimrod.py +159 -0
  154. data/vendor/pygments-main/pygments/lexers/nit.py +64 -0
  155. data/vendor/pygments-main/pygments/lexers/nix.py +136 -0
  156. data/vendor/pygments-main/pygments/lexers/oberon.py +121 -0
  157. data/vendor/pygments-main/pygments/lexers/objective.py +504 -0
  158. data/vendor/pygments-main/pygments/lexers/ooc.py +85 -0
  159. data/vendor/pygments-main/pygments/lexers/other.py +41 -0
  160. data/vendor/pygments-main/pygments/lexers/parasail.py +79 -0
  161. data/vendor/pygments-main/pygments/lexers/parsers.py +800 -0
  162. data/vendor/pygments-main/pygments/lexers/pascal.py +644 -0
  163. data/vendor/pygments-main/pygments/lexers/pawn.py +205 -0
  164. data/vendor/pygments-main/pygments/lexers/perl.py +732 -0
  165. data/vendor/pygments-main/pygments/lexers/php.py +321 -0
  166. data/vendor/pygments-main/pygments/lexers/pointless.py +71 -0
  167. data/vendor/pygments-main/pygments/lexers/pony.py +94 -0
  168. data/vendor/pygments-main/pygments/lexers/praat.py +302 -0
  169. data/vendor/pygments-main/pygments/lexers/prolog.py +306 -0
  170. data/vendor/pygments-main/pygments/lexers/promql.py +183 -0
  171. data/vendor/pygments-main/pygments/lexers/python.py +1151 -0
  172. data/vendor/pygments-main/pygments/lexers/qvt.py +152 -0
  173. data/vendor/pygments-main/pygments/lexers/r.py +191 -0
  174. data/vendor/pygments-main/pygments/lexers/rdf.py +463 -0
  175. data/vendor/pygments-main/pygments/lexers/rebol.py +431 -0
  176. data/vendor/pygments-main/pygments/lexers/resource.py +85 -0
  177. data/vendor/pygments-main/pygments/lexers/ride.py +139 -0
  178. data/vendor/pygments-main/pygments/lexers/rnc.py +67 -0
  179. data/vendor/pygments-main/pygments/lexers/roboconf.py +82 -0
  180. data/vendor/pygments-main/pygments/lexers/robotframework.py +552 -0
  181. data/vendor/pygments-main/pygments/lexers/ruby.py +517 -0
  182. data/vendor/pygments-main/pygments/lexers/rust.py +224 -0
  183. data/vendor/pygments-main/pygments/lexers/sas.py +228 -0
  184. data/vendor/pygments-main/pygments/lexers/scdoc.py +83 -0
  185. data/vendor/pygments-main/pygments/lexers/scripting.py +1284 -0
  186. data/vendor/pygments-main/pygments/lexers/sgf.py +61 -0
  187. data/vendor/pygments-main/pygments/lexers/shell.py +914 -0
  188. data/vendor/pygments-main/pygments/lexers/sieve.py +69 -0
  189. data/vendor/pygments-main/pygments/lexers/slash.py +185 -0
  190. data/vendor/pygments-main/pygments/lexers/smalltalk.py +195 -0
  191. data/vendor/pygments-main/pygments/lexers/smv.py +79 -0
  192. data/vendor/pygments-main/pygments/lexers/snobol.py +83 -0
  193. data/vendor/pygments-main/pygments/lexers/solidity.py +92 -0
  194. data/vendor/pygments-main/pygments/lexers/special.py +105 -0
  195. data/vendor/pygments-main/pygments/lexers/sql.py +837 -0
  196. data/vendor/pygments-main/pygments/lexers/stata.py +171 -0
  197. data/vendor/pygments-main/pygments/lexers/supercollider.py +95 -0
  198. data/vendor/pygments-main/pygments/lexers/tcl.py +145 -0
  199. data/vendor/pygments-main/pygments/lexers/templates.py +2264 -0
  200. data/vendor/pygments-main/pygments/lexers/teraterm.py +335 -0
  201. data/vendor/pygments-main/pygments/lexers/testing.py +207 -0
  202. data/vendor/pygments-main/pygments/lexers/text.py +26 -0
  203. data/vendor/pygments-main/pygments/lexers/textedit.py +169 -0
  204. data/vendor/pygments-main/pygments/lexers/textfmts.py +430 -0
  205. data/vendor/pygments-main/pygments/lexers/theorem.py +474 -0
  206. data/vendor/pygments-main/pygments/lexers/tnt.py +263 -0
  207. data/vendor/pygments-main/pygments/lexers/trafficscript.py +54 -0
  208. data/vendor/pygments-main/pygments/lexers/typoscript.py +219 -0
  209. data/vendor/pygments-main/pygments/lexers/unicon.py +412 -0
  210. data/vendor/pygments-main/pygments/lexers/urbi.py +146 -0
  211. data/vendor/pygments-main/pygments/lexers/usd.py +90 -0
  212. data/vendor/pygments-main/pygments/lexers/varnish.py +190 -0
  213. data/vendor/pygments-main/pygments/lexers/verification.py +114 -0
  214. data/vendor/pygments-main/pygments/lexers/web.py +24 -0
  215. data/vendor/pygments-main/pygments/lexers/webidl.py +299 -0
  216. data/vendor/pygments-main/pygments/lexers/webmisc.py +991 -0
  217. data/vendor/pygments-main/pygments/lexers/whiley.py +116 -0
  218. data/vendor/pygments-main/pygments/lexers/x10.py +69 -0
  219. data/vendor/pygments-main/pygments/lexers/xorg.py +37 -0
  220. data/vendor/pygments-main/pygments/lexers/yang.py +104 -0
  221. data/vendor/pygments-main/pygments/lexers/zig.py +124 -0
  222. data/vendor/pygments-main/pygments/modeline.py +1 -1
  223. data/vendor/pygments-main/pygments/plugin.py +4 -2
  224. data/vendor/pygments-main/pygments/regexopt.py +1 -1
  225. data/vendor/pygments-main/pygments/scanner.py +2 -2
  226. data/vendor/pygments-main/pygments/sphinxext.py +2 -4
  227. data/vendor/pygments-main/pygments/style.py +61 -24
  228. data/vendor/pygments-main/pygments/styles/__init__.py +10 -4
  229. data/vendor/pygments-main/pygments/styles/abap.py +1 -1
  230. data/vendor/pygments-main/pygments/styles/algol.py +1 -1
  231. data/vendor/pygments-main/pygments/styles/algol_nu.py +1 -1
  232. data/vendor/pygments-main/pygments/styles/arduino.py +2 -2
  233. data/vendor/pygments-main/pygments/styles/autumn.py +1 -1
  234. data/vendor/pygments-main/pygments/styles/borland.py +1 -1
  235. data/vendor/pygments-main/pygments/styles/bw.py +1 -1
  236. data/vendor/pygments-main/pygments/styles/colorful.py +1 -1
  237. data/vendor/pygments-main/pygments/styles/default.py +1 -1
  238. data/vendor/pygments-main/pygments/styles/emacs.py +1 -1
  239. data/vendor/pygments-main/pygments/styles/friendly.py +1 -1
  240. data/vendor/pygments-main/pygments/styles/fruity.py +1 -1
  241. data/vendor/pygments-main/pygments/styles/igor.py +1 -1
  242. data/vendor/pygments-main/pygments/styles/inkpot.py +67 -0
  243. data/vendor/pygments-main/pygments/styles/lovelace.py +1 -1
  244. data/vendor/pygments-main/pygments/styles/manni.py +1 -1
  245. data/vendor/pygments-main/pygments/styles/monokai.py +4 -3
  246. data/vendor/pygments-main/pygments/styles/murphy.py +1 -1
  247. data/vendor/pygments-main/pygments/styles/native.py +1 -1
  248. data/vendor/pygments-main/pygments/styles/paraiso_dark.py +1 -1
  249. data/vendor/pygments-main/pygments/styles/paraiso_light.py +1 -1
  250. data/vendor/pygments-main/pygments/styles/pastie.py +1 -1
  251. data/vendor/pygments-main/pygments/styles/perldoc.py +1 -1
  252. data/vendor/pygments-main/pygments/styles/rainbow_dash.py +1 -1
  253. data/vendor/pygments-main/pygments/styles/rrt.py +1 -1
  254. data/vendor/pygments-main/pygments/styles/sas.py +1 -1
  255. data/vendor/pygments-main/pygments/styles/solarized.py +134 -0
  256. data/vendor/pygments-main/pygments/styles/stata_dark.py +41 -0
  257. data/vendor/pygments-main/pygments/styles/{stata.py → stata_light.py} +14 -15
  258. data/vendor/pygments-main/pygments/styles/tango.py +1 -1
  259. data/vendor/pygments-main/pygments/styles/trac.py +1 -1
  260. data/vendor/pygments-main/pygments/styles/vim.py +1 -1
  261. data/vendor/pygments-main/pygments/styles/vs.py +1 -1
  262. data/vendor/pygments-main/pygments/styles/xcode.py +1 -1
  263. data/vendor/pygments-main/pygments/token.py +1 -1
  264. data/vendor/pygments-main/pygments/unistring.py +47 -108
  265. data/vendor/pygments-main/pygments/util.py +15 -92
  266. metadata +69 -136
  267. data/CHANGELOG.md +0 -111
  268. data/README.md +0 -121
  269. data/circle.yml +0 -20
  270. data/test/test_data.py +0 -514
  271. data/test/test_data_generated +0 -2582
  272. data/vendor/custom_lexers/github.py +0 -565
  273. data/vendor/pygments-main/CHANGES +0 -1186
  274. data/vendor/pygments-main/MANIFEST.in +0 -6
  275. data/vendor/pygments-main/Makefile +0 -65
  276. data/vendor/pygments-main/README.rst +0 -39
  277. data/vendor/pygments-main/REVISION +0 -1
  278. data/vendor/pygments-main/TODO +0 -12
  279. data/vendor/pygments-main/doc/Makefile +0 -153
  280. data/vendor/pygments-main/doc/_static/favicon.ico +0 -0
  281. data/vendor/pygments-main/doc/_static/logo_new.png +0 -0
  282. data/vendor/pygments-main/doc/_static/logo_only.png +0 -0
  283. data/vendor/pygments-main/doc/_templates/docssidebar.html +0 -3
  284. data/vendor/pygments-main/doc/_templates/indexsidebar.html +0 -25
  285. data/vendor/pygments-main/doc/_themes/pygments14/layout.html +0 -98
  286. data/vendor/pygments-main/doc/_themes/pygments14/static/bodybg.png +0 -0
  287. data/vendor/pygments-main/doc/_themes/pygments14/static/docbg.png +0 -0
  288. data/vendor/pygments-main/doc/_themes/pygments14/static/listitem.png +0 -0
  289. data/vendor/pygments-main/doc/_themes/pygments14/static/logo.png +0 -0
  290. data/vendor/pygments-main/doc/_themes/pygments14/static/pocoo.png +0 -0
  291. data/vendor/pygments-main/doc/_themes/pygments14/static/pygments14.css_t +0 -401
  292. data/vendor/pygments-main/doc/_themes/pygments14/theme.conf +0 -15
  293. data/vendor/pygments-main/doc/conf.py +0 -241
  294. data/vendor/pygments-main/doc/docs/api.rst +0 -354
  295. data/vendor/pygments-main/doc/docs/authors.rst +0 -4
  296. data/vendor/pygments-main/doc/docs/changelog.rst +0 -1
  297. data/vendor/pygments-main/doc/docs/cmdline.rst +0 -166
  298. data/vendor/pygments-main/doc/docs/filterdevelopment.rst +0 -71
  299. data/vendor/pygments-main/doc/docs/filters.rst +0 -41
  300. data/vendor/pygments-main/doc/docs/formatterdevelopment.rst +0 -169
  301. data/vendor/pygments-main/doc/docs/formatters.rst +0 -48
  302. data/vendor/pygments-main/doc/docs/index.rst +0 -66
  303. data/vendor/pygments-main/doc/docs/integrate.rst +0 -40
  304. data/vendor/pygments-main/doc/docs/java.rst +0 -70
  305. data/vendor/pygments-main/doc/docs/lexerdevelopment.rst +0 -728
  306. data/vendor/pygments-main/doc/docs/lexers.rst +0 -69
  307. data/vendor/pygments-main/doc/docs/moinmoin.rst +0 -39
  308. data/vendor/pygments-main/doc/docs/plugins.rst +0 -93
  309. data/vendor/pygments-main/doc/docs/quickstart.rst +0 -205
  310. data/vendor/pygments-main/doc/docs/rstdirective.rst +0 -22
  311. data/vendor/pygments-main/doc/docs/styles.rst +0 -201
  312. data/vendor/pygments-main/doc/docs/tokens.rst +0 -372
  313. data/vendor/pygments-main/doc/docs/unicode.rst +0 -58
  314. data/vendor/pygments-main/doc/download.rst +0 -41
  315. data/vendor/pygments-main/doc/faq.rst +0 -139
  316. data/vendor/pygments-main/doc/index.rst +0 -54
  317. data/vendor/pygments-main/doc/languages.rst +0 -154
  318. data/vendor/pygments-main/doc/make.bat +0 -190
  319. data/vendor/pygments-main/doc/pygmentize.1 +0 -94
  320. data/vendor/pygments-main/external/autopygmentize +0 -101
  321. data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +0 -162
  322. data/vendor/pygments-main/external/markdown-processor.py +0 -67
  323. data/vendor/pygments-main/external/moin-parser.py +0 -112
  324. data/vendor/pygments-main/external/pygments.bashcomp +0 -38
  325. data/vendor/pygments-main/external/rst-directive.py +0 -82
  326. data/vendor/pygments-main/pygmentize +0 -8
  327. data/vendor/pygments-main/requirements.txt +0 -5
  328. data/vendor/pygments-main/scripts/check_sources.py +0 -211
  329. data/vendor/pygments-main/scripts/debug_lexer.py +0 -246
  330. data/vendor/pygments-main/scripts/detect_missing_analyse_text.py +0 -33
  331. data/vendor/pygments-main/scripts/epydoc.css +0 -280
  332. data/vendor/pygments-main/scripts/get_vimkw.py +0 -74
  333. data/vendor/pygments-main/scripts/pylintrc +0 -301
  334. data/vendor/pygments-main/scripts/vim2pygments.py +0 -935
  335. data/vendor/pygments-main/setup.cfg +0 -10
  336. data/vendor/pygments-main/setup.py +0 -77
  337. data/vendor/pygments-main/tox.ini +0 -7
  338. data/vendor/simplejson/.gitignore +0 -10
  339. data/vendor/simplejson/.travis.yml +0 -5
  340. data/vendor/simplejson/CHANGES.txt +0 -291
  341. data/vendor/simplejson/LICENSE.txt +0 -19
  342. data/vendor/simplejson/MANIFEST.in +0 -5
  343. data/vendor/simplejson/README.rst +0 -19
  344. data/vendor/simplejson/conf.py +0 -179
  345. data/vendor/simplejson/index.rst +0 -628
  346. data/vendor/simplejson/scripts/make_docs.py +0 -18
  347. data/vendor/simplejson/setup.py +0 -104
  348. data/vendor/simplejson/simplejson/__init__.py +0 -510
  349. data/vendor/simplejson/simplejson/_speedups.c +0 -2745
  350. data/vendor/simplejson/simplejson/decoder.py +0 -425
  351. data/vendor/simplejson/simplejson/encoder.py +0 -567
  352. data/vendor/simplejson/simplejson/ordered_dict.py +0 -119
  353. data/vendor/simplejson/simplejson/scanner.py +0 -77
  354. data/vendor/simplejson/simplejson/tests/__init__.py +0 -67
  355. data/vendor/simplejson/simplejson/tests/test_bigint_as_string.py +0 -55
  356. data/vendor/simplejson/simplejson/tests/test_check_circular.py +0 -30
  357. data/vendor/simplejson/simplejson/tests/test_decimal.py +0 -66
  358. data/vendor/simplejson/simplejson/tests/test_decode.py +0 -83
  359. data/vendor/simplejson/simplejson/tests/test_default.py +0 -9
  360. data/vendor/simplejson/simplejson/tests/test_dump.py +0 -67
  361. data/vendor/simplejson/simplejson/tests/test_encode_basestring_ascii.py +0 -46
  362. data/vendor/simplejson/simplejson/tests/test_encode_for_html.py +0 -32
  363. data/vendor/simplejson/simplejson/tests/test_errors.py +0 -34
  364. data/vendor/simplejson/simplejson/tests/test_fail.py +0 -91
  365. data/vendor/simplejson/simplejson/tests/test_float.py +0 -19
  366. data/vendor/simplejson/simplejson/tests/test_indent.py +0 -86
  367. data/vendor/simplejson/simplejson/tests/test_item_sort_key.py +0 -20
  368. data/vendor/simplejson/simplejson/tests/test_namedtuple.py +0 -121
  369. data/vendor/simplejson/simplejson/tests/test_pass1.py +0 -76
  370. data/vendor/simplejson/simplejson/tests/test_pass2.py +0 -14
  371. data/vendor/simplejson/simplejson/tests/test_pass3.py +0 -20
  372. data/vendor/simplejson/simplejson/tests/test_recursion.py +0 -67
  373. data/vendor/simplejson/simplejson/tests/test_scanstring.py +0 -117
  374. data/vendor/simplejson/simplejson/tests/test_separators.py +0 -42
  375. data/vendor/simplejson/simplejson/tests/test_speedups.py +0 -20
  376. data/vendor/simplejson/simplejson/tests/test_tuple.py +0 -49
  377. data/vendor/simplejson/simplejson/tests/test_unicode.py +0 -109
  378. data/vendor/simplejson/simplejson/tool.py +0 -39
@@ -0,0 +1,171 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.stata
4
+ ~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexer for Stata
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+ from pygments.lexer import RegexLexer, default, include, words
14
+ from pygments.token import Comment, Keyword, Name, Number, \
15
+ String, Text, Operator
16
+
17
+ from pygments.lexers._stata_builtins import builtins_base, builtins_functions
18
+
19
+ __all__ = ['StataLexer']
20
+
21
+
22
class StataLexer(RegexLexer):
    """
    For `Stata <http://www.stata.com/>`_ do files.

    Tokenizes Stata do/ado source: nested comments, regular and
    compound strings, local/global macros, numbers, builtin keywords
    and functions, operators, and display formats.

    .. versionadded:: 2.2
    """
    # Syntax based on
    # - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
    # - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
    # - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim

    name = 'Stata'
    aliases = ['stata', 'do']
    filenames = ['*.do', '*.ado']
    mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
    flags = re.MULTILINE | re.DOTALL

    tokens = {
        # Rule order matters: RegexLexer tries rules top to bottom, so
        # comments must be attempted before operators ('*', '/') and
        # strings/macros before the catch-all Text rule.
        'root': [
            include('comments'),
            include('strings'),
            include('macros'),
            include('numbers'),
            include('keywords'),
            include('operators'),
            include('format'),
            (r'.', Text),
        ],
        # Comments are a complicated beast in Stata because they can be
        # nested and there are a few corner cases with that. See:
        # - github.com/kylebarron/language-stata/issues/90
        # - statalist.org/forums/forum/general-stata-discussion/general/1448244
        'comments': [
            # '//' starts a comment only at line start or after
            # whitespace; the (?!/) guard keeps '///' for the rule below.
            (r'(^//|(?<=\s)//)(?!/)', Comment.Single, 'comments-double-slash'),
            (r'^\s*\*', Comment.Single, 'comments-star'),
            (r'/\*', Comment.Multiline, 'comments-block'),
            (r'(^///|(?<=\s)///)', Comment.Special, 'comments-triple-slash')
        ],
        'comments-block': [
            # '/*' inside a block comment nests another level.
            (r'/\*', Comment.Multiline, '#push'),
            # this ends and restarts a comment block. but need to catch this so
            # that it doesn't start _another_ level of comment blocks
            (r'\*/\*', Comment.Multiline),
            (r'(\*/\s+\*(?!/)[^\n]*)|(\*/)', Comment.Multiline, '#pop'),
            # Match anything else as a character inside the comment
            (r'.', Comment.Multiline),
        ],
        'comments-star': [
            # A '*' comment can hand off to '///' (line continuation
            # comment) or '//' for the rest of the line.
            (r'///.*?\n', Comment.Single,
             ('#pop', 'comments-triple-slash')),
            (r'(^//|(?<=\s)//)(?!/)', Comment.Single,
             ('#pop', 'comments-double-slash')),
            (r'/\*', Comment.Multiline, 'comments-block'),
            # Last character before the newline ends the comment.
            (r'.(?=\n)', Comment.Single, '#pop'),
            (r'.', Comment.Single),
        ],
        'comments-triple-slash': [
            (r'\n', Comment.Special, '#pop'),
            # A // breaks out of a comment for the rest of the line
            (r'//.*?(?=\n)', Comment.Single, '#pop'),
            (r'.', Comment.Special),
        ],
        'comments-double-slash': [
            (r'\n', Text, '#pop'),
            (r'.', Comment.Single),
        ],
        # `"compound string"' and regular "string"; note the former are
        # nested.
        'strings': [
            (r'`"', String, 'string-compound'),
            # A '"' not preceded by a backtick opens a regular string.
            (r'(?<!`)"', String, 'string-regular'),
        ],
        'string-compound': [
            (r'`"', String, '#push'),
            (r'"\'', String, '#pop'),
            (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
            include('macros'),
            (r'.', String)
        ],
        'string-regular': [
            # Closes on an unescaped '"' or (unterminated) at end of line.
            (r'(")(?!\')|(?=\n)', String, '#pop'),
            (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
            include('macros'),
            (r'.', String)
        ],
        # A local is usually
        # `\w{0,31}'
        # `:extended macro'
        # `=expression'
        # `[rsen](results)'
        # `(++--)scalar(++--)'
        #
        # However, there are all sorts of weird rules wrt edge
        # cases. Instead of writing 27 exceptions, anything inside
        # `' is a local.
        #
        # A global is more restricted, so we do follow rules. Note only
        # locals explicitly enclosed ${} can be nested.
        'macros': [
            (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
            (r'\$', Name.Variable.Global, 'macro-global-name'),
            (r'`', Name.Variable, 'macro-local'),
        ],
        'macro-local': [
            (r'`', Name.Variable, '#push'),
            (r"'", Name.Variable, '#pop'),
            (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
            (r'\$', Name.Variable.Global, 'macro-global-name'),
            (r'.', Name.Variable), # fallback
        ],
        'macro-global-nested': [
            (r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
            (r'\}', Name.Variable.Global, '#pop'),
            (r'\$', Name.Variable.Global, 'macro-global-name'),
            (r'`', Name.Variable, 'macro-local'),
            (r'\w', Name.Variable.Global), # fallback
            default('#pop'),
        ],
        'macro-global-name': [
            # NOTE(review): these rules are 4-tuples, but RegexLexer only
            # reads the first three elements of a rule, so the trailing
            # '#pop' is silently ignored — confirm upstream whether
            # ('#pop', '<state>') was the intended transition.
            (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
            (r'\$', Name.Variable.Global, 'macro-global-name', '#pop'),
            (r'`', Name.Variable, 'macro-local', '#pop'),
            # A global name is at most 32 word characters.
            (r'\w{1,32}', Name.Variable.Global, '#pop'),
        ],
        # Built in functions and statements
        'keywords': [
            # Functions are only highlighted when followed by '('.
            (words(builtins_functions, prefix = r'\b', suffix = r'(?=\()'),
             Name.Function),
            (words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
             Keyword),
        ],
        # http://www.stata.com/help.cgi?operators
        'operators': [
            (r'-|==|<=|>=|<|>|&|!=', Operator),
            (r'\*|\+|\^|/|!|~|==|~=', Operator)
        ],
        # Stata numbers
        'numbers': [
            # decimal number
            (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
             Number),
        ],
        # Stata formats
        'format': [
            # Numeric formats, e.g. %9.2fc
            (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Other),
            # Hex/binary display formats
            (r'%(21x|16H|16L|8H|8L)', Name.Other),
            # Date/time formats
            (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg)\S{0,32}', Name.Other),
            # String formats, e.g. %-10s
            (r'%[-~]?\d{1,4}s', Name.Other),
        ]
    }
@@ -0,0 +1,95 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.supercollider
4
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexer for SuperCollider
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+
14
+ from pygments.lexer import RegexLexer, include, words, default
15
+ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
16
+ Number, Punctuation
17
+
18
+ __all__ = ['SuperColliderLexer']
19
+
20
+
21
class SuperColliderLexer(RegexLexer):
    """
    For `SuperCollider <http://supercollider.github.io/>`_ source code.

    The state machine is adapted from the JavaScript lexer: the
    'slashstartsregex' state disambiguates '/' as a regex opener vs.
    the division operator based on the preceding token.

    .. versionadded:: 2.1
    """

    name = 'SuperCollider'
    aliases = ['sc', 'supercollider']
    filenames = ['*.sc', '*.scd']
    mimetypes = ['application/supercollider', 'text/supercollider', ]

    flags = re.DOTALL | re.MULTILINE
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            # NOTE(review): '<!--' looks like a leftover from the
            # JavaScript lexer this was adapted from; kept as-is for
            # output compatibility.
            (r'<!--', Comment),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline)
        ],
        'slashstartsregex': [
            include('commentsandwhitespace'),
            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'(?=/)', Text, ('#pop', 'badregex')),
            default('#pop'),
        ],
        'badregex': [
            (r'\n', Text, '#pop')
        ],
        'root': [
            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
             r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (words((
                'for', 'in', 'while', 'do', 'break', 'return', 'continue',
                'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
                'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
                'void'), suffix=r'\b'),
             Keyword, 'slashstartsregex'),
            (words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
             Keyword.Declaration, 'slashstartsregex'),
            # FIX: the first reserved word used to be '(abstract' (stray
            # parenthesis), so 'abstract' was never matched and the
            # generated regex carried a dead '\(abstract' alternative.
            (words((
                'abstract', 'boolean', 'byte', 'char', 'class', 'const',
                'debugger', 'double', 'enum', 'export', 'extends', 'final',
                'float', 'goto', 'implements', 'import', 'int', 'interface',
                'long', 'native', 'package', 'private', 'protected', 'public',
                'short', 'static', 'super', 'synchronized', 'throws',
                'transient', 'volatile'), suffix=r'\b'),
             Keyword.Reserved),
            (words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
            (words((
                'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
                'Object', 'Packages', 'RegExp', 'String',
                'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
                'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
                'thisThread', 'this'), suffix=r'\b'),
             Name.Builtin),
            (r'[$a-zA-Z_]\w*', Name.Other),
            # '\name' symbols; plain identifiers are caught by the rule
            # above, so this mostly matches the backslash form.
            (r'\\?[$a-zA-Z_]\w*', String.Symbol),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
        ]
    }

    def analyse_text(text):
        """We're searching for a common function and a unique keyword here.

        Returns 0.1 when either marker is present; implicitly None
        (treated as no match) otherwise.
        """
        if 'SinOsc' in text or 'thisFunctionDef' in text:
            return 0.1
@@ -0,0 +1,145 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.tcl
4
+ ~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for Tcl and related languages.
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ from pygments.lexer import RegexLexer, include, words
13
+ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
14
+ Number
15
+ from pygments.util import shebang_matches
16
+
17
+ __all__ = ['TclLexer']
18
+
19
+
20
class TclLexer(RegexLexer):
    """
    For Tcl source code.

    .. versionadded:: 0.10
    """

    # Control-flow / core language commands, highlighted as keywords.
    keyword_cmds_re = words((
        'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
        'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
        'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
        'vwait', 'while'), prefix=r'\b', suffix=r'\b')

    # Library/builtin commands, highlighted as builtins.
    builtin_cmds_re = words((
        'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
        'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
        'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
        'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
        'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
        'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
        'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
        'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
        'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')

    name = 'Tcl'
    aliases = ['tcl']
    filenames = ['*.tcl', '*.rvt']
    mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']

    # NOTE: not a method — this is invoked while the class body is being
    # evaluated, to stamp out one copy of the command rules per bracketing
    # context ("", "-in-brace", "-in-bracket", "-in-paren").
    def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
        return [
            (keyword_cmds_re, Keyword, 'params' + context),
            (builtin_cmds_re, Name.Builtin, 'params' + context),
            (r'([\w.-]+)', Name.Variable, 'params' + context),
            (r'#', Comment, 'comment'),
        ]

    tokens = {
        'root': [
            include('command'),
            include('basic'),
            include('data'),
            (r'\}', Keyword),  # HACK: somehow we miscounted our braces
        ],
        # One command state per bracketing context; the "params*" states they
        # push onto the stack are defined further below.
        'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
        'command-in-brace': _gen_command_rules(keyword_cmds_re,
                                               builtin_cmds_re,
                                               "-in-brace"),
        'command-in-bracket': _gen_command_rules(keyword_cmds_re,
                                                 builtin_cmds_re,
                                                 "-in-bracket"),
        'command-in-paren': _gen_command_rules(keyword_cmds_re,
                                               builtin_cmds_re,
                                               "-in-paren"),
        'basic': [
            (r'\(', Keyword, 'paren'),
            (r'\[', Keyword, 'bracket'),
            (r'\{', Keyword, 'brace'),
            (r'"', String.Double, 'string'),
            (r'(eq|ne|in|ni)\b', Operator.Word),
            (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
        ],
        'data': [
            (r'\s+', Text),
            (r'0x[a-fA-F0-9]+', Number.Hex),
            (r'0[0-7]+', Number.Oct),
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer),
            (r'\$([\w.:-]+)', Name.Variable),
            (r'([\w.:-]+)', Text),
        ],
        # Arguments of a command; a ";" or newline ends the command.
        'params': [
            (r';', Keyword, '#pop'),
            (r'\n', Text, '#pop'),
            (r'(else|elseif|then)\b', Keyword),
            include('basic'),
            include('data'),
        ],
        # The "-in-*" variants additionally pop twice (out of the params
        # state AND the enclosing bracketing state) on the closing delimiter.
        'params-in-brace': [
            (r'\}', Keyword, ('#pop', '#pop')),
            include('params')
        ],
        'params-in-paren': [
            (r'\)', Keyword, ('#pop', '#pop')),
            include('params')
        ],
        'params-in-bracket': [
            (r'\]', Keyword, ('#pop', '#pop')),
            include('params')
        ],
        'string': [
            (r'\[', String.Double, 'string-square'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
            (r'"', String.Double, '#pop')
        ],
        'string-square': [
            (r'\[', String.Double, 'string-square'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
            (r'\]', String.Double, '#pop')
        ],
        'brace': [
            (r'\}', Keyword, '#pop'),
            include('command-in-brace'),
            include('basic'),
            include('data'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('command-in-paren'),
            include('basic'),
            include('data'),
        ],
        'bracket': [
            (r'\]', Keyword, '#pop'),
            include('command-in-bracket'),
            include('basic'),
            include('data'),
        ],
        # Comments continue over backslash-newline continuations.
        'comment': [
            (r'.*[^\\]\n', Comment, '#pop'),
            (r'.*\\\n', Comment),
        ],
    }

    def analyse_text(text):
        # A "#!...tcl" shebang is a strong signal for Tcl scripts.
        return shebang_matches(text, r'(tcl)')
@@ -0,0 +1,2264 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.templates
4
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for various template engines' markup.
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+
14
+ from pygments.lexers.html import HtmlLexer, XmlLexer
15
+ from pygments.lexers.javascript import JavascriptLexer, LassoLexer
16
+ from pygments.lexers.css import CssLexer
17
+ from pygments.lexers.php import PhpLexer
18
+ from pygments.lexers.python import PythonLexer
19
+ from pygments.lexers.perl import PerlLexer
20
+ from pygments.lexers.jvm import JavaLexer, TeaLangLexer
21
+ from pygments.lexers.data import YamlLexer
22
+ from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
23
+ include, using, this, default, combined
24
+ from pygments.token import Error, Punctuation, Whitespace, \
25
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
26
+ from pygments.util import html_doctype_matches, looks_like_xml
27
+
28
+ __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
29
+ 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
30
+ 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
31
+ 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
32
+ 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
33
+ 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
34
+ 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
35
+ 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
36
+ 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
37
+ 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
38
+ 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
39
+ 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
40
+ 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
41
+ 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
42
+ 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
43
+ 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
44
+ 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
45
+ 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
46
+ 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
47
+ 'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']
48
+
49
+
50
class ErbLexer(Lexer):
    """
    Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
    lexer.

    Just highlights ruby code between the preprocessor directives, other data
    is left untouched by the lexer.

    All options are also forwarded to the `RubyLexer`.
    """

    name = 'ERB'
    aliases = ['erb']
    mimetypes = ['application/x-ruby-templating']

    # Splits input at ERB delimiters; `%` at line start introduces a raw
    # ruby line (re.M makes ^/$ per-line).
    _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)

    def __init__(self, **options):
        # Imported lazily to avoid a circular import at module load time.
        from pygments.lexers.ruby import RubyLexer
        self.ruby_lexer = RubyLexer(**options)
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """
        Since ERB doesn't allow "<%" and other tags inside of ruby
        blocks we have to use a split approach here that fails for
        that too.
        """
        # Reversed so we can pop() segments off the end in O(1);
        # `state`: 0 = plain text, 1 = expecting a tag, 2 = inside a block.
        tokens = self._block_re.split(text)
        tokens.reverse()
        state = idx = 0
        try:
            while True:
                # text
                if state == 0:
                    val = tokens.pop()
                    yield idx, Other, val
                    idx += len(val)
                    state = 1
                # block starts
                elif state == 1:
                    tag = tokens.pop()
                    # literals
                    if tag in ('<%%', '%%>'):
                        yield idx, Other, tag
                        idx += 3
                        state = 0
                    # comment
                    elif tag == '<%#':
                        yield idx, Comment.Preproc, tag
                        val = tokens.pop()
                        yield idx + 3, Comment, val
                        idx += 3 + len(val)
                        state = 2
                    # blocks or output
                    elif tag in ('<%', '<%=', '<%-'):
                        yield idx, Comment.Preproc, tag
                        idx += len(tag)
                        data = tokens.pop()
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(data):
                            yield r_idx + idx, r_token, r_value
                        idx += len(data)
                        state = 2
                    elif tag in ('%>', '-%>'):
                        # A closing tag with no matching opener is an error.
                        yield idx, Error, tag
                        idx += len(tag)
                        state = 0
                    # % raw ruby statements
                    else:
                        yield idx, Comment.Preproc, tag[0]
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
                            yield idx + 1 + r_idx, r_token, r_value
                        idx += len(tag)
                        state = 0
                # block ends
                elif state == 2:
                    tag = tokens.pop()
                    if tag not in ('%>', '-%>'):
                        yield idx, Other, tag
                    else:
                        yield idx, Comment.Preproc, tag
                    idx += len(tag)
                    state = 0
        except IndexError:
            # tokens exhausted — normal termination of the loop.
            return

    def analyse_text(text):
        if '<%' in text and '%>' in text:
            return 0.4
143
+
144
+
145
class SmartyLexer(RegexLexer):
    """
    Generic `Smarty <http://smarty.php.net/>`_ template lexer.

    Just highlights smarty code between the preprocessor directives, other
    data is left untouched by the lexer.
    """

    name = 'Smarty'
    aliases = ['smarty']
    filenames = ['*.tpl']
    mimetypes = ['application/x-smarty']

    flags = re.MULTILINE | re.DOTALL

    tokens = {
        'root': [
            # Everything up to the next "{" is plain template data.
            (r'[^{]+', Other),
            # {* ... *} comments
            (r'(\{)(\*.*?\*)(\})',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            # {php}...{/php} blocks are delegated to the PHP lexer.
            (r'(\{php\})(.*?)(\{/php\})',
             bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
                      Comment.Preproc)),
            # {tag ...} / {/tag ...} function calls.
            (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
             bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
            (r'\{', Comment.Preproc, 'smarty')
        ],
        'smarty': [
            (r'\s+', Text),
            (r'\{', Comment.Preproc, '#push'),
            (r'\}', Comment.Preproc, '#pop'),
            (r'#[a-zA-Z_]\w*#', Name.Variable),
            (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
            (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
            (r'(true|false|null)\b', Keyword.Constant),
            # Numeric literals (decimal with optional fraction/exponent, or
            # hex).  Fixed: the original pattern "(eE[+-][0-9])" matched the
            # literal text "eE" and required a sign, and "[0-9]" consumed
            # only one digit of the integer part.
            (r"[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'[a-zA-Z_]\w*', Name.Attribute)
        ]
    }

    def analyse_text(text):
        # Score a few characteristic Smarty constructs; {$var} alone is only
        # weak evidence, hence the tiny weight.
        rv = 0.0
        if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
            rv += 0.15
        if re.search(r'\{include\s+file=.*?\}', text):
            rv += 0.15
        if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
            rv += 0.15
        if re.search(r'\{\$.*?\}', text):
            rv += 0.01
        return rv
199
+
200
+
201
class VelocityLexer(RegexLexer):
    """
    Generic `Velocity <http://velocity.apache.org/>`_ template lexer.

    Just highlights velocity directives and variable references, other
    data is left untouched by the lexer.
    """

    name = 'Velocity'
    aliases = ['velocity']
    filenames = ['*.vm', '*.fhtml']

    flags = re.MULTILINE | re.DOTALL

    # A Velocity identifier (used for directive and variable names).
    identifier = r'[a-zA-Z_]\w*'

    tokens = {
        'root': [
            # Anything without "#" or "$" is plain template data.
            (r'[^{#$]+', Other),
            # #* ... *# block comments
            (r'(#)(\*.*?\*)(#)',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            # ## line comments
            (r'(##)(.*?$)',
             bygroups(Comment.Preproc, Comment)),
            # #directive( ... ) — with or without the {} "formal" syntax
            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
             'directiveparams'),
            (r'(#\{?)(' + identifier + r')(\}|\b)',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
            # $ref, $!ref, ${ref}, $!{ref}
            (r'\$!?\{?', Punctuation, 'variable')
        ],
        'variable': [
            (identifier, Name.Variable),
            (r'\(', Punctuation, 'funcparams'),
            # Chained property access pushes another 'variable' frame.
            (r'(\.)(' + identifier + r')',
             bygroups(Punctuation, Name.Variable), '#push'),
            (r'\}', Punctuation, '#pop'),
            default('#pop')
        ],
        'directiveparams': [
            (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
             Operator),
            (r'\[', Operator, 'rangeoperator'),
            (r'\b' + identifier + r'\b', Name.Function),
            include('funcparams')
        ],
        'rangeoperator': [
            # [a..b] range syntax
            (r'\.\.', Operator),
            include('funcparams'),
            (r'\]', Operator, '#pop')
        ],
        'funcparams': [
            (r'\$!?\{?', Punctuation, 'variable'),
            (r'\s+', Text),
            (r'[,:]', Punctuation),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r"\b[0-9]+\b", Number),
            (r'(true|false|null)\b', Keyword.Constant),
            # Balanced nesting of (), {} and [] via push/pop.
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'\{', Punctuation, '#push'),
            (r'\}', Punctuation, '#pop'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
        ]
    }

    def analyse_text(text):
        # Directive pairs (#macro/#if/#foreach ... #end) are strong evidence;
        # a lone $reference is extremely weak.
        rv = 0.0
        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
            rv += 0.25
        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
            rv += 0.15
        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
            rv += 0.15
        if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
                     r'(\.\w+(\([^)]*\))?)*\}?', text):
            rv += 0.01
        return rv
281
+
282
+
283
class VelocityHtmlLexer(DelegatingLexer):
    """
    `VelocityLexer` variant for HTML templates: Velocity markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `HtmlLexer`.
    """

    name = 'HTML+Velocity'
    aliases = ['html+velocity']
    mimetypes = ['text/html+velocity']
    alias_filenames = ['*.html', '*.fhtml']

    def __init__(self, **options):
        # Root lexer: HTML; language lexer: Velocity.
        super().__init__(HtmlLexer, VelocityLexer, **options)
297
+
298
+
299
class VelocityXmlLexer(DelegatingLexer):
    """
    `VelocityLexer` variant for XML templates: Velocity markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `XmlLexer`.
    """

    name = 'XML+Velocity'
    aliases = ['xml+velocity']
    mimetypes = ['application/xml+velocity']
    alias_filenames = ['*.xml', '*.vm']

    def __init__(self, **options):
        # Root lexer: XML; language lexer: Velocity.
        super().__init__(XmlLexer, VelocityLexer, **options)

    def analyse_text(text):
        # Start from the plain Velocity score (slightly discounted so the
        # pure Velocity lexer wins ties) and boost if the data looks like XML.
        score = VelocityLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        return score
319
+
320
+
321
class DjangoLexer(RegexLexer):
    """
    Generic `django <http://www.djangoproject.com/documentation/templates/>`_
    and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.

    It just highlights django/jinja code between the preprocessor directives,
    other data is left untouched by the lexer.
    """

    name = 'Django/Jinja'
    aliases = ['django', 'jinja']
    mimetypes = ['application/x-django-templating', 'application/x-jinja']

    flags = re.M | re.S

    tokens = {
        'root': [
            # Everything up to the next "{" is plain template data.
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # jinja/django comments
            (r'\{#.*?#\}', Comment),
            # django comments
            (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Comment, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # raw jinja blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Text, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'block'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'block'),
            (r'\{', Other)
        ],
        # Shared rules for names/operators/literals inside {{ }} and {% %}.
        'varnames': [
            (r'(\|)(\s*)([a-zA-Z_]\w*)',
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
            (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
             r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
             Keyword),
            (r'(loop|block|super|forloop)\b', Name.Builtin),
            (r'[a-zA-Z_][\w-]*', Name.Variable),
            (r'\.\w+', Name.Variable),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
            # Numeric literals.  Fixed: the original exponent sub-pattern
            # "(eE[+-][0-9])" matched the literal text "eE" and required a
            # sign, and "[0-9]" consumed only one digit of the integer part.
            (r"[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'block': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation)
        ]
    }

    def analyse_text(text):
        # {% block %} / {% extends %} is the strongest signal; {% if %} and
        # {{ var }} add smaller boosts.
        rv = 0.0
        if re.search(r'\{%\s*(block|extends)', text) is not None:
            rv += 0.4
        if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
            rv += 0.1
        if re.search(r'\{\{.*?\}\}', text) is not None:
            rv += 0.1
        return rv
402
+
403
+
404
class MyghtyLexer(RegexLexer):
    """
    Generic `myghty templates`_ lexer. Code that isn't Myghty
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.6

    .. _myghty templates: http://www.myghty.org/
    """

    name = 'Myghty'
    aliases = ['myghty']
    filenames = ['*.myt', 'autodelegate']
    mimetypes = ['application/x-myghty']

    tokens = {
        'root': [
            (r'\s+', Text),
            # <%def name>...</%def> / <%method name>...</%method> blocks;
            # the body is re-lexed with this lexer itself (using(this)).
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # Other <%tag>...</%tag> blocks contain Python code.
            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PythonLexer), Name.Tag)),
            # <& component, args &> calls (non-piped and piped variants).
            (r'(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% expr %> / <%! ... %> substitutions with Python inside.
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
            # "#" comment lines and "%" Python lines (line-start only).
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PythonLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(Other, Operator)),
        ]
    }
450
+
451
+
452
class MyghtyHtmlLexer(DelegatingLexer):
    """
    `MyghtyLexer` variant for HTML templates: Myghty markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `HtmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'HTML+Myghty'
    aliases = ['html+myghty']
    mimetypes = ['text/html+myghty']

    def __init__(self, **options):
        # Root lexer: HTML; language lexer: Myghty.
        super().__init__(HtmlLexer, MyghtyLexer, **options)
466
+
467
+
468
class MyghtyXmlLexer(DelegatingLexer):
    """
    `MyghtyLexer` variant for XML templates: Myghty markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `XmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'XML+Myghty'
    aliases = ['xml+myghty']
    mimetypes = ['application/xml+myghty']

    def __init__(self, **options):
        # Root lexer: XML; language lexer: Myghty.
        super().__init__(XmlLexer, MyghtyLexer, **options)
482
+
483
+
484
class MyghtyJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `JavascriptLexer`.

    .. versionadded:: 0.6
    """

    name = 'JavaScript+Myghty'
    aliases = ['js+myghty', 'javascript+myghty']
    mimetypes = ['application/x-javascript+myghty',
                 'text/x-javascript+myghty',
                 # 'mygthy' is a historical typo; it is kept so existing
                 # lookups keep working, with the correct spelling added.
                 'text/javascript+mygthy',
                 'text/javascript+myghty']

    def __init__(self, **options):
        # Root lexer: JavaScript; language lexer: Myghty.
        super().__init__(JavascriptLexer, MyghtyLexer, **options)
500
+
501
+
502
class MyghtyCssLexer(DelegatingLexer):
    """
    `MyghtyLexer` variant for CSS templates: Myghty markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `CssLexer`.

    .. versionadded:: 0.6
    """

    name = 'CSS+Myghty'
    aliases = ['css+myghty']
    mimetypes = ['text/css+myghty']

    def __init__(self, **options):
        # Root lexer: CSS; language lexer: Myghty.
        super().__init__(CssLexer, MyghtyLexer, **options)
516
+
517
+
518
class MasonLexer(RegexLexer):
    """
    Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
    Mason markup is HTML.

    .. _mason templates: http://www.masonhq.com/

    .. versionadded:: 1.4
    """
    name = 'Mason'
    aliases = ['mason']
    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
    mimetypes = ['application/x-mason']

    tokens = {
        'root': [
            (r'\s+', Text),
            # <%doc> blocks are pure documentation.
            (r'(?s)(<%doc>)(.*?)(</%doc>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            # <%def>/<%method> blocks are re-lexed with this lexer itself.
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # Other <%tag> blocks contain Perl; \2 backrefs the tag name.
            (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
            # <& component, args &> calls (non-piped and piped variants).
            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% expr %> / <%! ... %> substitutions with Perl inside.
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
            # "#" comment lines and "%" Perl lines (line-start only).
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PerlLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(using(HtmlLexer), Operator)),
        ]
    }

    def analyse_text(text):
        # Closing </%class>/</%doc>/</%init> tags are conclusive; a <&...&>
        # component call alone is much weaker evidence.
        result = 0.0
        if re.search(r'</%(class|doc|init)>', text) is not None:
            result = 1.0
        elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
            result = 0.11
        return result
572
+
573
+
574
class MakoLexer(RegexLexer):
    """
    Generic `mako templates`_ lexer. Code that isn't Mako
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.7

    .. _mako templates: http://www.makotemplates.org/
    """

    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']

    tokens = {
        'root': [
            # "% endfor"-style control-line terminators.
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            # "% ..." control lines contain Python.
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            # "## ..." comment lines.
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
            # <%tag ...> opening and </%tag> closing tags.
            (r'(<%)([\w.:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
            # <% ... %> / <%! ... %> code blocks with Python inside.
            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # ${...} expression substitutions.
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            # name="value" attributes inside a tag.
            (r'((?:\w+)\s*=)(\s*)(".*?")',
             bygroups(Name.Attribute, Text, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
640
+
641
+
642
class MakoHtmlLexer(DelegatingLexer):
    """
    `MakoLexer` variant for HTML templates: Mako markup is lexed first,
    and every span it leaves untouched is re-highlighted with the
    `HtmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'HTML+Mako'
    aliases = ['html+mako']
    mimetypes = ['text/html+mako']

    def __init__(self, **options):
        # Root lexer: HTML; language lexer: Mako.
        super().__init__(HtmlLexer, MakoLexer, **options)
656
+
657
+
658
class MakoXmlLexer(DelegatingLexer):
    """
    `MakoLexer` variant for XML templates: Mako markup is lexed first,
    and every span it leaves untouched is re-highlighted with the
    `XmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'XML+Mako'
    aliases = ['xml+mako']
    mimetypes = ['application/xml+mako']

    def __init__(self, **options):
        # Root lexer: XML; language lexer: Mako.
        super().__init__(XmlLexer, MakoLexer, **options)
672
+
673
+
674
class MakoJavascriptLexer(DelegatingLexer):
    """
    `MakoLexer` variant for JavaScript templates: Mako markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `JavascriptLexer`.

    .. versionadded:: 0.7
    """

    name = 'JavaScript+Mako'
    aliases = ['js+mako', 'javascript+mako']
    mimetypes = ['application/x-javascript+mako',
                 'text/x-javascript+mako',
                 'text/javascript+mako']

    def __init__(self, **options):
        # Root lexer: JavaScript; language lexer: Mako.
        super().__init__(JavascriptLexer, MakoLexer, **options)
690
+
691
+
692
class MakoCssLexer(DelegatingLexer):
    """
    `MakoLexer` variant for CSS templates: Mako markup is lexed first,
    and every span it leaves untouched is re-highlighted with the
    `CssLexer`.

    .. versionadded:: 0.7
    """

    name = 'CSS+Mako'
    aliases = ['css+mako']
    mimetypes = ['text/css+mako']

    def __init__(self, **options):
        # Root lexer: CSS; language lexer: Mako.
        super().__init__(CssLexer, MakoLexer, **options)
706
+
707
+
708
+ # Genshi and Cheetah lexers courtesy of Matt Good.
709
+
710
class CheetahPythonLexer(Lexer):
    """
    Lexer for handling Cheetah's special $ tokens in Python syntax.
    """

    def get_tokens_unprocessed(self, text):
        # Delegate to a plain Python lexer, but reclassify Cheetah's "$"
        # placeholder — which pure Python would flag as an error — as a
        # preprocessor token.
        inner = PythonLexer(**self.options)
        for index, token, value in inner.get_tokens_unprocessed(text):
            if value == '$' and token == Token.Error:
                token = Comment.Preproc
            yield index, token, value
721
+
722
+
723
class CheetahLexer(RegexLexer):
    """
    Generic `cheetah templates`_ lexer. Code that isn't Cheetah
    markup is yielded as `Token.Other`. This also works for
    `spitfire templates`_ which use the same syntax.

    .. _cheetah templates: http://www.cheetahtemplate.org/
    .. _spitfire templates: http://code.google.com/p/spitfire/
    """

    name = 'Cheetah'
    aliases = ['cheetah', 'spitfire']
    filenames = ['*.tmpl', '*.spt']
    mimetypes = ['application/x-cheetah', 'application/x-spitfire']

    tokens = {
        'root': [
            # ## line comments and #* ... *# block comments.
            (r'(##[^\n]*)$',
             (bygroups(Comment))),
            (r'#[*](.|\n)*?[*]#', Comment),
            # Directive terminators.
            (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
            (r'#slurp$', Comment.Preproc),
            # #directive args — args lexed as Cheetah-flavoured Python.
            (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
             (bygroups(Comment.Preproc, using(CheetahPythonLexer),
                       Comment.Preproc))),
            # TODO support other Python syntax like $foo['bar']
            (r'(\$)([a-zA-Z_][\w.]*\w)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
            # ${expr} / ${!expr} placeholders.
            (r'(?s)(\$\{!?)(.*?)(\})',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            (r'''(?sx)
                (.+?)               # anything, followed by:
                (?:
                 (?=\#[#a-zA-Z]*) | # an eval comment
                 (?=\$[a-zA-Z_{]) | # a substitution
                 \Z                 # end of string
                )
            ''', Other),
            (r'\s+', Text),
        ],
    }
765
+
766
+
767
class CheetahHtmlLexer(DelegatingLexer):
    """
    `CheetahLexer` variant for HTML templates: Cheetah markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `HtmlLexer`.
    """

    name = 'HTML+Cheetah'
    aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
    mimetypes = ['text/html+cheetah', 'text/html+spitfire']

    def __init__(self, **options):
        # Root lexer: HTML; language lexer: Cheetah.
        super().__init__(HtmlLexer, CheetahLexer, **options)
779
+
780
+
781
class CheetahXmlLexer(DelegatingLexer):
    """
    `CheetahLexer` variant for XML templates: Cheetah markup is lexed
    first, and every span it leaves untouched is re-highlighted with the
    `XmlLexer`.
    """

    name = 'XML+Cheetah'
    aliases = ['xml+cheetah', 'xml+spitfire']
    mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']

    def __init__(self, **options):
        # Root lexer: XML; language lexer: Cheetah.
        super().__init__(XmlLexer, CheetahLexer, **options)
793
+
794
+
795
class CheetahJavascriptLexer(DelegatingLexer):
    """
    `CheetahLexer` variant for JavaScript templates: Cheetah markup is
    lexed first, and every span it leaves untouched is re-highlighted
    with the `JavascriptLexer`.
    """

    name = 'JavaScript+Cheetah'
    aliases = ['js+cheetah', 'javascript+cheetah',
               'js+spitfire', 'javascript+spitfire']
    mimetypes = ['application/x-javascript+cheetah',
                 'text/x-javascript+cheetah',
                 'text/javascript+cheetah',
                 'application/x-javascript+spitfire',
                 'text/x-javascript+spitfire',
                 'text/javascript+spitfire']

    def __init__(self, **options):
        # Root lexer: JavaScript; language lexer: Cheetah.
        super().__init__(JavascriptLexer, CheetahLexer, **options)
813
+
814
+
815
class GenshiTextLexer(RegexLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
    templates.
    """

    name = 'Genshi Text'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']

    tokens = {
        'root': [
            (r'[^#$\s]+', Other),
            # "##" comment lines and "#" directive lines.
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            (r'[#$\s]', Other),
        ],
        'directive': [
            (r'\n', Text, '#pop'),
            # def/for/if directives take a full Python expression.
            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
            (r'(choose|when|with)([^\S\n]+)(.*)',
             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
            (r'(choose|otherwise)\b', Keyword, '#pop'),
            # "#end" may carry a trailing comment.
            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
        ],
        'variable': [
            # ${expr} substitutions (a preceding "$" escapes them).
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # Bare $name references.
            (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
             Name.Variable),
        ]
    }
848
+
849
+
850
class GenshiMarkupLexer(RegexLexer):
    """
    Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
    `GenshiLexer`.
    """

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^<$]+', Other),
            # <?python ... ?> processing instructions.
            (r'(<\?python)(.*?)(\?>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # yield style and script blocks as Other
            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
            # py:* directive tags vs. ordinary tags.
            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
            include('variable'),
            (r'[<$]', Other),
        ],
        'pytag': [
            (r'\s+', Text),
            (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'pyattr': [
            # Attribute values of py:* tags contain Python expressions.
            ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
            ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
        'tag': [
            (r'\s+', Text),
            (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
            (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            ('"', String, 'attr-dstring'),
            ("'", String, 'attr-sstring'),
            (r'[^\s>]*', String, '#pop')
        ],
        'attr-dstring': [
            ('"', String, '#pop'),
            include('strings'),
            # A single quote inside a double-quoted value is literal text.
            ("'", String)
        ],
        'attr-sstring': [
            ("'", String, '#pop'),
            include('strings'),
            # A double quote inside a single-quoted value is literal text.
            # Fixed: this rule previously matched "'", which the first rule
            # of this state already pops on, so a '"' character fell through
            # unmatched and was emitted as an Error token.
            ('"', String)
        ],
        'strings': [
            # Runs of text without quotes or "$" (handled by 'variable').
            ('[^"\'$]+', String),
            include('variable')
        ],
        'variable': [
            # ${expr} substitutions (a preceding "$" escapes them).
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # Bare $name references.
            (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
             Name.Variable),
        ]
    }
912
+
913
+
914
class HtmlGenshiLexer(DelegatingLexer):
    """
    Lexer for `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ HTML templates; content not
    consumed by the genshi markup rules is rendered with the `HtmlLexer`.
    """

    name = 'HTML+Genshi'
    aliases = ['html+genshi', 'html+kid']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+genshi']

    def __init__(self, **options):
        super().__init__(HtmlLexer, GenshiMarkupLexer, **options)

    def analyse_text(text):
        score = 0.0
        # reward the two characteristic genshi constructs
        if re.search(r'\$\{.*?\}', text) is not None:
            score += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            score += 0.2
        # stay just below a plain-HTML match when no markers are present
        return score + HtmlLexer.analyse_text(text) - 0.01
935
+
936
+
937
class GenshiLexer(DelegatingLexer):
    """
    Lexer for `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ XML templates; content not
    consumed by the genshi markup rules is rendered with the `XmlLexer`.
    """

    name = 'Genshi'
    aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
    filenames = ['*.kid']
    alias_filenames = ['*.xml']
    mimetypes = ['application/x-genshi', 'application/x-kid']

    def __init__(self, **options):
        super().__init__(XmlLexer, GenshiMarkupLexer, **options)

    def analyse_text(text):
        score = 0.0
        # reward the two characteristic genshi constructs
        if re.search(r'\$\{.*?\}', text) is not None:
            score += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            score += 0.2
        # stay just below a plain-XML match when no markers are present
        return score + XmlLexer.analyse_text(text) - 0.01
959
+
960
+
961
class JavascriptGenshiLexer(DelegatingLexer):
    """
    Highlights javascript code embedded in genshi text templates.
    """

    name = 'JavaScript+Genshi Text'
    aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
               'javascript+genshi']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+genshi',
                 'text/x-javascript+genshi',
                 'text/javascript+genshi']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, GenshiTextLexer, **options)

    def analyse_text(text):
        # always rank slightly below the generic genshi lexer
        score = GenshiLexer.analyse_text(text)
        return score - 0.05
979
+
980
+
981
class CssGenshiLexer(DelegatingLexer):
    """
    Highlights CSS definitions embedded in genshi text templates.
    """

    name = 'CSS+Genshi Text'
    aliases = ['css+genshitext', 'css+genshi']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+genshi']

    def __init__(self, **options):
        super().__init__(CssLexer, GenshiTextLexer, **options)

    def analyse_text(text):
        # always rank slightly below the generic genshi lexer
        score = GenshiLexer.analyse_text(text)
        return score - 0.05
996
+
997
+
998
class RhtmlLexer(DelegatingLexer):
    """
    ERB template lexer that renders everything the `ErbLexer` leaves
    untouched with the html lexer.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'RHTML'
    aliases = ['rhtml', 'html+erb', 'html+ruby']
    filenames = ['*.rhtml']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+ruby']

    def __init__(self, **options):
        super().__init__(HtmlLexer, ErbLexer, **options)

    def analyse_text(text):
        score = ErbLexer.analyse_text(text) - 0.01
        # an HTML doctype earns one more than the XmlErbLexer returns
        return score + 0.5 if html_doctype_matches(text) else score
1021
+
1022
+
1023
class XmlErbLexer(DelegatingLexer):
    """
    ERB template lexer that renders data outside preprocessor
    directives with the `XmlLexer`.
    """

    name = 'XML+Ruby'
    aliases = ['xml+erb', 'xml+ruby']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+ruby']

    def __init__(self, **options):
        super().__init__(XmlLexer, ErbLexer, **options)

    def analyse_text(text):
        score = ErbLexer.analyse_text(text) - 0.01
        # bonus when the body actually looks like XML
        return score + 0.4 if looks_like_xml(text) else score
1042
+
1043
+
1044
class CssErbLexer(DelegatingLexer):
    """
    ERB template lexer that renders unlexed data with the `CssLexer`.
    """

    name = 'CSS+Ruby'
    aliases = ['css+erb', 'css+ruby']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+ruby']

    def __init__(self, **options):
        super().__init__(CssLexer, ErbLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain ERB
        score = ErbLexer.analyse_text(text)
        return score - 0.05
1059
+
1060
+
1061
class JavascriptErbLexer(DelegatingLexer):
    """
    ERB template lexer that renders unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Ruby'
    aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+ruby',
                 'text/x-javascript+ruby',
                 'text/javascript+ruby']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, ErbLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain ERB
        score = ErbLexer.analyse_text(text)
        return score - 0.05
1079
+
1080
+
1081
class HtmlPhpLexer(DelegatingLexer):
    """
    PHP template lexer that renders data outside `<?php ... ?>` blocks
    with the `HtmlLexer`.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'HTML+PHP'
    aliases = ['html+php']
    filenames = ['*.phtml']
    alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
                       '*.php[345]']
    mimetypes = ['application/x-php',
                 'application/x-httpd-php', 'application/x-httpd-php3',
                 'application/x-httpd-php4', 'application/x-httpd-php5']

    def __init__(self, **options):
        super().__init__(HtmlLexer, PhpLexer, **options)

    def analyse_text(text):
        score = PhpLexer.analyse_text(text) - 0.01
        # an HTML doctype is a strong hint for the HTML variant
        return score + 0.5 if html_doctype_matches(text) else score
1105
+
1106
+
1107
class XmlPhpLexer(DelegatingLexer):
    """
    PHP template lexer that renders data outside `<?php ... ?>` blocks
    with the `XmlLexer`.
    """

    name = 'XML+PHP'
    aliases = ['xml+php']
    alias_filenames = ['*.xml', '*.php', '*.php[345]']
    mimetypes = ['application/xml+php']

    def __init__(self, **options):
        super().__init__(XmlLexer, PhpLexer, **options)

    def analyse_text(text):
        score = PhpLexer.analyse_text(text) - 0.01
        # bonus when the body actually looks like XML
        return score + 0.4 if looks_like_xml(text) else score
1125
+
1126
+
1127
class CssPhpLexer(DelegatingLexer):
    """
    PHP template lexer that renders unmatched data with the `CssLexer`.
    """

    name = 'CSS+PHP'
    aliases = ['css+php']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+php']

    def __init__(self, **options):
        super().__init__(CssLexer, PhpLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain PHP
        score = PhpLexer.analyse_text(text)
        return score - 0.05
1142
+
1143
+
1144
class JavascriptPhpLexer(DelegatingLexer):
    """
    PHP template lexer that renders unmatched data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+PHP'
    aliases = ['js+php', 'javascript+php']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+php',
                 'text/x-javascript+php',
                 'text/javascript+php']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, PhpLexer, **options)

    def analyse_text(text):
        # delegates to the PHP score unchanged (no penalty applied here)
        score = PhpLexer.analyse_text(text)
        return score
1162
+
1163
+
1164
class HtmlSmartyLexer(DelegatingLexer):
    """
    Smarty template lexer that renders unlexed data with the
    `HtmlLexer`.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'HTML+Smarty'
    aliases = ['html+smarty']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
    mimetypes = ['text/html+smarty']

    def __init__(self, **options):
        super().__init__(HtmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        score = SmartyLexer.analyse_text(text) - 0.01
        # an HTML doctype is a strong hint for the HTML variant
        return score + 0.5 if html_doctype_matches(text) else score
1185
+
1186
+
1187
class XmlSmartyLexer(DelegatingLexer):
    """
    Smarty template lexer that renders unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Smarty'
    aliases = ['xml+smarty']
    alias_filenames = ['*.xml', '*.tpl']
    mimetypes = ['application/xml+smarty']

    def __init__(self, **options):
        super().__init__(XmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        score = SmartyLexer.analyse_text(text) - 0.01
        # bonus when the body actually looks like XML
        return score + 0.4 if looks_like_xml(text) else score
1206
+
1207
+
1208
class CssSmartyLexer(DelegatingLexer):
    """
    Smarty template lexer that renders unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Smarty'
    aliases = ['css+smarty']
    alias_filenames = ['*.css', '*.tpl']
    mimetypes = ['text/css+smarty']

    def __init__(self, **options):
        super().__init__(CssLexer, SmartyLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain Smarty
        score = SmartyLexer.analyse_text(text)
        return score - 0.05
1224
+
1225
+
1226
class JavascriptSmartyLexer(DelegatingLexer):
    """
    Smarty template lexer that renders unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Smarty'
    aliases = ['js+smarty', 'javascript+smarty']
    alias_filenames = ['*.js', '*.tpl']
    mimetypes = ['application/x-javascript+smarty',
                 'text/x-javascript+smarty',
                 'text/javascript+smarty']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, SmartyLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain Smarty
        score = SmartyLexer.analyse_text(text)
        return score - 0.05
1244
+
1245
+
1246
class HtmlDjangoLexer(DelegatingLexer):
    """
    Django/Jinja template lexer that renders unlexed data with the
    `HtmlLexer`.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'HTML+Django/Jinja'
    aliases = ['html+django', 'html+jinja', 'htmldjango']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+django', 'text/html+jinja']

    def __init__(self, **options):
        super().__init__(HtmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        score = DjangoLexer.analyse_text(text) - 0.01
        # an HTML doctype is a strong hint for the HTML variant
        return score + 0.5 if html_doctype_matches(text) else score
1267
+
1268
+
1269
class XmlDjangoLexer(DelegatingLexer):
    """
    Django/Jinja template lexer that renders unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Django/Jinja'
    aliases = ['xml+django', 'xml+jinja']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+django', 'application/xml+jinja']

    def __init__(self, **options):
        super().__init__(XmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        score = DjangoLexer.analyse_text(text) - 0.01
        # bonus when the body actually looks like XML
        return score + 0.4 if looks_like_xml(text) else score
1288
+
1289
+
1290
class CssDjangoLexer(DelegatingLexer):
    """
    Django/Jinja template lexer that renders unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Django/Jinja'
    aliases = ['css+django', 'css+jinja']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+django', 'text/css+jinja']

    def __init__(self, **options):
        super().__init__(CssLexer, DjangoLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain Django/Jinja
        score = DjangoLexer.analyse_text(text)
        return score - 0.05
1306
+
1307
+
1308
class JavascriptDjangoLexer(DelegatingLexer):
    """
    Django/Jinja template lexer that renders unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Django/Jinja'
    aliases = ['js+django', 'javascript+django',
               'js+jinja', 'javascript+jinja']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+django',
                 'application/x-javascript+jinja',
                 'text/x-javascript+django',
                 'text/x-javascript+jinja',
                 'text/javascript+django',
                 'text/javascript+jinja']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, DjangoLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain Django/Jinja
        score = DjangoLexer.analyse_text(text)
        return score - 0.05
1330
+
1331
+
1332
class JspRootLexer(RegexLexer):
    """
    Base for the `JspLexer`. Yields `Token.Other` for area outside of
    JSP tags.

    .. versionadded:: 0.7
    """

    tokens = {
        'root': [
            # '<%', optionally followed by '=', '!', '@', ... opens a
            # Java scriptlet section
            (r'<%\S?', Keyword, 'sec'),
            # FIXME: I want to make these keywords but still parse attributes.
            (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
             Keyword),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
        ],
    }
1355
+
1356
+
1357
class JspLexer(DelegatingLexer):
    """
    Lexer for Java Server Pages.

    .. versionadded:: 0.7
    """
    name = 'Java Server Page'
    aliases = ['jsp']
    filenames = ['*.jsp']
    mimetypes = ['application/x-jsp']

    def __init__(self, **options):
        super().__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        score = JavaLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        # scriptlet delimiters are a weak extra hint
        if '<%' in text and '%>' in text:
            score += 0.1
        return score
1378
+
1379
+
1380
class EvoqueLexer(RegexLexer):
    """
    For files using the Evoque templating system.

    .. versionadded:: 1.1
    """
    name = 'Evoque'
    aliases = ['evoque']
    filenames = ['*.evoque']
    mimetypes = ['application/x-evoque']

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^#$]+', Other),
            # '#[' opens a (nestable) comment, see the 'comment' state
            (r'#\[', Comment.Multiline, 'comment'),
            # '$$' is an escaped dollar sign
            (r'\$\$', Other),
            # svn keywords
            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
            # directives: begin, end
            # the (?(4)...) conditional groups require the closing '%'
            # only when the directive was opened with '{%'
            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            # comments nest: push another level for each '#['
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }

    def analyse_text(text):
        """Evoque templates use $evoque, which is unique."""
        # NOTE: implicitly returns None when the marker is absent
        if '$evoque' in text:
            return 1
1436
+
1437
class EvoqueHtmlLexer(DelegatingLexer):
    """
    Evoque template lexer that renders unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 1.1
    """
    name = 'HTML+Evoque'
    aliases = ['html+evoque']
    filenames = ['*.html']
    mimetypes = ['text/html+evoque']

    def __init__(self, **options):
        super().__init__(HtmlLexer, EvoqueLexer, **options)

    def analyse_text(text):
        # defer entirely to the Evoque '$evoque' marker check
        return EvoqueLexer.analyse_text(text)
1454
+
1455
+
1456
class EvoqueXmlLexer(DelegatingLexer):
    """
    Evoque template lexer that renders unlexed data with the
    `XmlLexer`.

    .. versionadded:: 1.1
    """
    name = 'XML+Evoque'
    aliases = ['xml+evoque']
    filenames = ['*.xml']
    mimetypes = ['application/xml+evoque']

    def __init__(self, **options):
        super().__init__(XmlLexer, EvoqueLexer, **options)

    def analyse_text(text):
        # defer entirely to the Evoque '$evoque' marker check
        return EvoqueLexer.analyse_text(text)
1473
+
1474
+
1475
class ColdfusionLexer(RegexLexer):
    """
    Coldfusion statements
    """
    name = 'cfstatement'
    aliases = ['cfs']
    filenames = []
    mimetypes = []
    # ColdFusion keywords and operators are case-insensitive
    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'//.*?\n', Comment.Single),
            (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
            (r'\+\+|--', Operator),
            (r'[-+*/^&=!]', Operator),
            (r'<=|>=|<|>|==', Operator),
            # word-style comparison/logic operators
            (r'mod\b', Operator),
            (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
            (r'\|\||&&', Operator),
            (r'\?', Operator),
            (r'"', String.Double, 'string'),
            # There is a special rule for allowing html in single quoted
            # strings, evidently.
            (r"'.*?'", String.Single),
            (r'\d+', Number),
            (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
             r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
             r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            # built-in scopes
            (r'(application|session|client|cookie|super|this|variables|arguments)\b',
             Name.Constant),
            # a name followed by '(' is treated as a function call
            (r'([a-z_$][\w.]*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[a-z_$][\w.]*', Name.Variable),
            (r'[()\[\]{};:,.\\]', Punctuation),
            (r'\s+', Text),
        ],
        'string': [
            # "" is an escaped double quote inside a string
            (r'""', String.Double),
            # #expr# interpolation inside strings
            (r'#.+?#', String.Interp),
            (r'[^"#]+', String.Double),
            (r'#', String.Double),
            (r'"', String.Double, '#pop'),
        ],
    }
1521
+
1522
+
1523
class ColdfusionMarkupLexer(RegexLexer):
    """
    Coldfusion markup only
    """
    name = 'Coldfusion'
    aliases = ['cf']
    filenames = []
    mimetypes = []

    tokens = {
        'root': [
            (r'[^<]+', Other),
            include('tags'),
            (r'<[^<>]*', Other),
        ],
        'tags': [
            # <!--- ---> CF comments nest; handled in 'cfcomment'
            (r'<!---', Comment.Multiline, 'cfcomment'),
            (r'(?s)<!--.*?-->', Comment),
            (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
            # <cfscript> bodies are plain CFScript
            (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
            # negative lookbehind is for strings with embedded >
            (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
             r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
             r'mailpart|mail|header|content|zip|image|lock|argument|try|'
             r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
        ],
        'cfoutput': [
            (r'[^#<]+', Other),
            # #expr# interpolation inside <cfoutput>
            (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
                                      Punctuation)),
            # (r'<cfoutput.*?>', Name.Builtin, '#push'),
            (r'</cfoutput.*?>', Name.Builtin, '#pop'),
            include('tags'),
            (r'(?s)<[^<>]*', Other),
            (r'#', Other),
        ],
        'cfcomment': [
            # nested comment opens another level
            (r'<!---', Comment.Multiline, '#push'),
            (r'--->', Comment.Multiline, '#pop'),
            (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
        ],
    }
1567
+
1568
+
1569
class ColdfusionHtmlLexer(DelegatingLexer):
    """
    ColdFusion markup embedded in HTML; anything the markup rules do not
    consume is rendered with the `HtmlLexer`.
    """
    name = 'Coldfusion HTML'
    aliases = ['cfm']
    filenames = ['*.cfm', '*.cfml']
    mimetypes = ['application/x-coldfusion']

    def __init__(self, **options):
        super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
1580
+
1581
+
1582
class ColdfusionCFCLexer(DelegatingLexer):
    """
    ColdFusion components (*.cfc): CFScript statements layered on top of
    the HTML/markup lexer.

    .. versionadded:: 2.0
    """
    name = 'Coldfusion CFC'
    aliases = ['cfc']
    filenames = ['*.cfc']
    mimetypes = []

    def __init__(self, **options):
        super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
1595
+
1596
+
1597
class SspLexer(DelegatingLexer):
    """
    Lexer for Scalate Server Pages.

    .. versionadded:: 1.4
    """
    name = 'Scalate Server Page'
    aliases = ['ssp']
    filenames = ['*.ssp']
    mimetypes = ['application/x-ssp']

    def __init__(self, **options):
        super().__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        score = 0.0
        # Scala-style value declarations are a strong hint
        if re.search(r'val \w+\s*:', text):
            score += 0.6
        if looks_like_xml(text):
            score += 0.2
        # scriptlet delimiters add a weak extra hint
        if '<%' in text and '%>' in text:
            score += 0.1
        return score
1620
+
1621
+
1622
class TeaTemplateRootLexer(RegexLexer):
    """
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
    code blocks.

    .. versionadded:: 1.5
    """

    tokens = {
        'root': [
            # '<%' (optionally followed by one non-space char) opens a
            # Tea code section
            (r'<%\S?', Keyword, 'sec'),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
        ],
    }
1642
+
1643
+
1644
class TeaTemplateLexer(DelegatingLexer):
    """
    Lexer for `Tea Templates <http://teatrove.org/>`_.

    .. versionadded:: 1.5
    """
    name = 'Tea'
    aliases = ['tea']
    filenames = ['*.tea']
    mimetypes = ['text/x-tea']

    def __init__(self, **options):
        super().__init__(XmlLexer, TeaTemplateRootLexer, **options)

    def analyse_text(text):
        score = TeaLangLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        # code-section delimiters add a weak extra hint
        if '<%' in text and '%>' in text:
            score += 0.1
        return score
1665
+
1666
+
1667
class LassoHtmlLexer(DelegatingLexer):
    """
    Lasso template lexer that renders unhandled data with the
    `HtmlLexer`.

    Nested JavaScript and CSS is also highlighted.

    .. versionadded:: 1.6
    """

    name = 'HTML+Lasso'
    aliases = ['html+lasso']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['text/html+lasso',
                 'application/x-httpd-lasso',
                 'application/x-httpd-lasso[89]']

    def __init__(self, **options):
        super().__init__(HtmlLexer, LassoLexer, **options)

    def analyse_text(text):
        score = LassoLexer.analyse_text(text) - 0.01
        # same as HTML lexer
        return score + 0.5 if html_doctype_matches(text) else score
1693
+
1694
+
1695
class LassoXmlLexer(DelegatingLexer):
    """
    Lasso template lexer that renders unhandled data with the
    `XmlLexer`.

    .. versionadded:: 1.6
    """

    name = 'XML+Lasso'
    aliases = ['xml+lasso']
    alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['application/xml+lasso']

    def __init__(self, **options):
        super().__init__(XmlLexer, LassoLexer, **options)

    def analyse_text(text):
        score = LassoLexer.analyse_text(text) - 0.01
        # bonus when the body actually looks like XML
        return score + 0.4 if looks_like_xml(text) else score
1717
+
1718
+
1719
class LassoCssLexer(DelegatingLexer):
    """
    Lasso template lexer that renders unhandled data with the
    `CssLexer`.

    .. versionadded:: 1.6
    """

    name = 'CSS+Lasso'
    aliases = ['css+lasso']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+lasso']

    def __init__(self, **options):
        # only delimited Lasso code should be recognized inside CSS
        options['requiredelimiters'] = True
        super().__init__(CssLexer, LassoLexer, **options)

    def analyse_text(text):
        score = LassoLexer.analyse_text(text) - 0.05
        # generic 'property: value;' shape plus a common CSS property
        if re.search(r'\w+:.+?;', text):
            score += 0.1
        if 'padding:' in text:
            score += 0.1
        return score
1743
+
1744
+
1745
class LassoJavascriptLexer(DelegatingLexer):
    """
    Lasso template lexer that renders unhandled data with the
    `JavascriptLexer`.

    .. versionadded:: 1.6
    """

    name = 'JavaScript+Lasso'
    aliases = ['js+lasso', 'javascript+lasso']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+lasso',
                 'text/x-javascript+lasso',
                 'text/javascript+lasso']

    def __init__(self, **options):
        # only delimited Lasso code should be recognized inside JS
        options['requiredelimiters'] = True
        super().__init__(JavascriptLexer, LassoLexer, **options)

    def analyse_text(text):
        # always rank slightly below plain Lasso
        return LassoLexer.analyse_text(text) - 0.05
1767
+
1768
+
1769
class HandlebarsLexer(RegexLexer):
    """
    Generic `handlebars <http://handlebarsjs.com/>` template lexer.

    Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.0
    """

    name = "Handlebars"
    aliases = ['handlebars']

    tokens = {
        'root': [
            (r'[^{]+', Other),

            # Comment start {{! }} or {{!--
            (r'\{\{!.*\}\}', Comment),

            # HTML Escaping open {{{expression
            (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),

            # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
            (r'(\{\{)([#~/]+)([^\s}]*)',
             bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
        ],

        'tag': [
            (r'\s+', Text),
            # HTML Escaping close }}}
            (r'\}\}\}', Comment.Special, '#pop'),
            # blockClose}}, includes optional tilde ~
            (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),

            # {{opt=something}}
            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),

            # Partials {{> ...}}
            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
            # dynamic partial: {{> (expr) ...}}
            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
             'dynamic-partial'),

            include('generic'),
        ],
        'dynamic-partial': [
            (r'\s+', Text),
            (r'\)', Punctuation, '#pop'),

            # the 'lookup' helper resolves a partial name at runtime
            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
                                                      Name.Variable, Text)),
            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
                                             using(this, state='variable'))),
            (r'[\w-]+', Name.Function),

            include('generic'),
        ],
        'variable': [
            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
            (r'\.[\w-]+', Name.Variable),
            # path references: this/, ./ and ../ chains
            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
        ],
        'generic': [
            include('variable'),

            # borrowed from DjangoLexer
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ]
    }
1843
+
1844
+
1845
class HandlebarsHtmlLexer(DelegatingLexer):
    """
    Handlebars template lexer that renders unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Handlebars"
    aliases = ["html+handlebars"]
    filenames = ['*.handlebars', '*.hbs']
    mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']

    def __init__(self, **options):
        super().__init__(HtmlLexer, HandlebarsLexer, **options)
1860
+
1861
+
1862
class YamlJinjaLexer(DelegatingLexer):
    """
    Jinja template lexer that renders unlexed data with the
    `YamlLexer`.

    Commonly used in Saltstack salt states.

    .. versionadded:: 2.0
    """

    name = 'YAML+Jinja'
    aliases = ['yaml+jinja', 'salt', 'sls']
    filenames = ['*.sls']
    mimetypes = ['text/x-yaml+jinja', 'text/x-sls']

    def __init__(self, **options):
        super().__init__(YamlLexer, DjangoLexer, **options)
1879
+
1880
+
1881
class LiquidLexer(RegexLexer):
    """
    Lexer for `Liquid templates
    <http://www.rubydoc.info/github/Shopify/liquid>`_.

    Liquid markup comes in two flavours: tag/block markup delimited by
    ``{% ... %}`` and output markup delimited by ``{{ ... }}``.  The token
    table below is a state machine; rule ORDER within each state is
    significant and must not be rearranged.

    .. versionadded:: 2.0
    """
    name = 'liquid'
    aliases = ['liquid']
    filenames = ['*.liquid']

    tokens = {
        'root': [
            # everything up to the first '{' is plain text
            (r'[^{]+', Text),
            # tags and block tags
            (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
            # output tags
            (r'(\{\{)(\s*)([^\s}]+)',
             bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
             'output'),
            # a lone '{' that did not open a tag
            (r'\{', Text)
        ],

        'tag-or-block': [
            # builtin logic blocks
            (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
            (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
             combined('end-of-block', 'whitespace', 'generic')),
            (r'(else)(\s*)(%\})',
             bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),

            # other builtin blocks
            (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation), '#pop'),
            (r'(comment)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
            (r'(raw)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),

            # end of block; the known end-keywords must come before the
            # generic 'end...' fallback on the next rule
            (r'(end(case|unless|if))(\s*)(%\})',
             bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
            (r'(end([^\s%]+))(\s*)(%\})',
             bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),

            # builtin tags (assign and include are handled together with usual tags)
            (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
             bygroups(Name.Tag, Whitespace,
                      using(this, state='generic'), Punctuation, Whitespace),
             'variable-tag-markup'),

            # other tags or blocks (catch-all; must stay last in this state)
            (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
        ],

        'output': [
            include('whitespace'),
            (r'\}\}', Punctuation, '#pop'),  # end of output

            (r'\|', Punctuation, 'filters')
        ],

        'filters': [
            include('whitespace'),
            # double '#pop': leaves both 'filters' and the enclosing 'output'
            (r'\}\}', Punctuation, ('#pop', '#pop')),  # end of filters and output

            (r'([^\s|:]+)(:?)(\s*)',
             bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
        ],

        'filter-markup': [
            # '|' starts the next filter in the chain
            (r'\|', Punctuation, '#pop'),
            include('end-of-tag'),
            include('default-param-markup')
        ],

        'condition': [
            include('end-of-block'),
            include('whitespace'),

            # binary comparison:  lhs <op> rhs %}
            (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
             bygroups(using(this, state = 'generic'), Whitespace, Operator,
                      Whitespace, using(this, state = 'generic'), Whitespace,
                      Punctuation)),
            (r'\b!', Operator),
            (r'\bnot\b', Operator.Word),
            (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
             bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
                      Whitespace, using(this, state = 'generic'))),

            include('generic'),
            include('whitespace')
        ],

        'generic-value': [
            include('generic'),
            include('end-at-whitespace')
        ],

        'operator': [
            (r'(\s*)((=|!|>|<)=?)(\s*)',
             bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
            (r'(\s*)(\bcontains\b)(\s*)',
             bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
        ],

        'end-of-tag': [
            (r'\}\}', Punctuation, '#pop')
        ],

        'end-of-block': [
            # pops twice: out of the current sub-state and the block state
            (r'%\}', Punctuation, ('#pop', '#pop'))
        ],

        'end-at-whitespace': [
            (r'\s+', Whitespace, '#pop')
        ],

        # states for unknown markup
        'param-markup': [
            include('whitespace'),
            # params with colons or equals
            (r'([^\s=:]+)(\s*)(=|:)',
             bygroups(Name.Attribute, Whitespace, Operator)),
            # explicit variables
            (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
             bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation)),

            include('string'),
            include('number'),
            include('keyword'),
            (r',', Punctuation)
        ],

        'default-param-markup': [
            include('param-markup'),
            (r'.', Text)  # fallback for switches / variables / un-quoted strings / ...
        ],

        'variable-param-markup': [
            include('param-markup'),
            include('variable'),
            (r'.', Text)  # fallback
        ],

        'tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('default-param-markup')
        ],

        'variable-tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('variable-param-markup')
        ],

        # states for different values types
        'keyword': [
            (r'\b(false|true)\b', Keyword.Constant)
        ],

        'variable': [
            (r'[a-zA-Z_]\w*', Name.Variable),
            # dotted attribute access between two word characters
            (r'(?<=\w)\.(?=\w)', Punctuation)
        ],

        'string': [
            (r"'[^']*'", String.Single),
            (r'"[^"]*"', String.Double)
        ],

        'number': [
            # float must be tried before integer so '1.5' is not split
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer)
        ],

        'generic': [  # decides for variable, string, keyword or number
            include('keyword'),
            include('string'),
            include('number'),
            include('variable')
        ],

        'whitespace': [
            (r'[ \t]+', Whitespace)
        ],

        # states for builtin blocks
        'comment': [
            (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), ('#pop', '#pop')),
            (r'.', Comment)
        ],

        'raw': [
            (r'[^{]+', Text),
            (r'(\{%)(\s*)(endraw)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), '#pop'),
            (r'\{', Text)
        ],
    }
2085
+
2086
+
2087
class TwigLexer(RegexLexer):
    """
    `Twig <http://twig.sensiolabs.org/>`_ template lexer.

    It just highlights Twig code between the preprocessor directives,
    other data is left untouched by the lexer.

    .. versionadded:: 2.0
    """

    name = 'Twig'
    aliases = ['twig']
    mimetypes = ['application/x-twig']

    flags = re.M | re.S

    # Note that a backslash is included in the following two patterns
    # PHP uses a backslash as a namespace separator
    _ident_char = r'[\\\w-]|[^\x00-\x7f]'
    _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
    _ident_end = r'(?:' + _ident_char + ')*'
    _ident_inner = _ident_begin + _ident_end

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # twig comments
            (r'\{\#.*?\#\}', Comment),
            # raw twig blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'tag'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'tag'),
            (r'\{', Other),
        ],
        'varnames': [
            (r'(\|)(\s*)(%s)' % _ident_inner,
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
            # Keyword words.  Each fragment below ends with '|' so the
            # alternatives stay separated across Python's implicit string
            # concatenation.  (The separators were previously missing,
            # fusing neighbouring words into bogus alternatives such as
            # 'isif', 'importconstant' and 'sameasmatches'.)
            (r'(in|not|and|b-and|or|b-or|b-xor|is|'
             r'if|elseif|else|import|'
             r'constant|defined|divisibleby|empty|even|iterable|odd|sameas|'
             r'matches|starts\s+with|ends\s+with)\b',
             Keyword),
            (r'(loop|block|parent)\b', Name.Builtin),
            (_ident_inner, Name.Variable),
            (r'\.' + _ident_inner, Name.Variable),
            (r'\.[0-9]+', Number),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'tag': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation),
        ],
    }
2168
+
2169
+
2170
class TwigHtmlLexer(DelegatingLexer):
    """Delegating lexer for HTML documents containing Twig markup.

    Twig constructs are tokenized by `TwigLexer`; whatever it leaves as
    unlexed data is then highlighted with the `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = 'HTML+Twig'
    aliases = ['html+twig']
    filenames = ['*.twig']
    mimetypes = ['text/html+twig']

    def __init__(self, **options):
        # Root (fallback) lexer first, embedded-language lexer second.
        super().__init__(HtmlLexer, TwigLexer, **options)
2185
+
2186
+
2187
class Angular2Lexer(RegexLexer):
    """
    Generic
    `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
    template lexer.

    Highlights only the Angular template tags (stuff between `{{` and `}}` and
    special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.1
    """

    name = "Angular2"
    aliases = ['ng2']

    tokens = {
        'root': [
            # anything that cannot start an Angular construct is emitted
            # as Other, to be picked up by a delegating lexer
            (r'[^{([*#]+', Other),

            # {{meal.name}}
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),

            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
            # the '=' variant must come before the bare-attribute variant below
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
             'attr'),
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),

            # *ngIf="..."; #f="ngForm"
            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
            (r'([*#])([\w:.-]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text)),
        ],

        'ngExpression': [
            (r'\s+(\|\s+)?', Text),
            (r'\}\}', Comment.Preproc, '#pop'),

            # Literals
            # NOTE(review): String.Boolean is not a stock Pygments token
            # name; presumably it relies on dynamic token-subtype creation
            # on attribute access — verify against pygments.token.
            (r':?(true|false)', String.Boolean),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),

            # Variabletext
            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
            (r'\.[\w-]+(\(.*\))?', Name.Variable),

            # inline If
            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
             bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            # unquoted attribute value: consume up to whitespace or '>'
            (r'[^\s>]+', String, '#pop'),
        ],
    }
2249
+
2250
+
2251
class Angular2HtmlLexer(DelegatingLexer):
    """Delegating lexer for HTML templates with Angular2 syntax.

    The `Angular2Lexer` tokenizes the Angular constructs; unlexed data
    is highlighted with the `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = 'HTML + Angular2'
    aliases = ['html+ng2']
    filenames = ['*.ng2']

    def __init__(self, **options):
        # Root (fallback) lexer first, embedded-language lexer second.
        super().__init__(HtmlLexer, Angular2Lexer, **options)