pygments.rb 1.2.1 → 2.2.0

Sign up to get free protection for your applications and access to all the features.
Files changed: 386
  1. checksums.yaml +5 -5
  2. data/.github/dependabot.yml +13 -0
  3. data/.github/workflows/ci.yml +40 -0
  4. data/.github/workflows/release.yml +24 -0
  5. data/.gitignore +4 -6
  6. data/.rubocop.yml +16 -0
  7. data/CHANGELOG.adoc +137 -0
  8. data/Gemfile +3 -1
  9. data/LICENSE +1 -1
  10. data/README.adoc +159 -0
  11. data/Rakefile +8 -32
  12. data/bench.rb +14 -8
  13. data/lib/pygments.rb +16 -14
  14. data/lib/pygments/lexer.rb +87 -65
  15. data/lib/pygments/mentos.py +23 -110
  16. data/lib/pygments/popen.rb +188 -284
  17. data/lib/pygments/version.rb +2 -2
  18. data/pygments.rb.gemspec +20 -13
  19. data/test/test_pygments.rb +50 -110
  20. data/vendor/pygments-main/{AUTHORS → Pygments-2.8.1.dist-info/AUTHORS} +26 -3
  21. data/vendor/pygments-main/Pygments-2.8.1.dist-info/INSTALLER +1 -0
  22. data/vendor/pygments-main/{LICENSE → Pygments-2.8.1.dist-info/LICENSE} +1 -1
  23. data/vendor/pygments-main/Pygments-2.8.1.dist-info/METADATA +48 -0
  24. data/vendor/pygments-main/Pygments-2.8.1.dist-info/RECORD +494 -0
  25. data/vendor/pygments-main/Pygments-2.8.1.dist-info/REQUESTED +0 -0
  26. data/vendor/pygments-main/Pygments-2.8.1.dist-info/WHEEL +5 -0
  27. data/vendor/pygments-main/Pygments-2.8.1.dist-info/entry_points.txt +3 -0
  28. data/vendor/pygments-main/Pygments-2.8.1.dist-info/top_level.txt +1 -0
  29. data/vendor/pygments-main/bin/pygmentize +8 -0
  30. data/vendor/pygments-main/pygments/__init__.py +6 -12
  31. data/vendor/pygments-main/pygments/__main__.py +17 -0
  32. data/vendor/pygments-main/pygments/cmdline.py +213 -184
  33. data/vendor/pygments-main/pygments/console.py +6 -10
  34. data/vendor/pygments-main/pygments/filter.py +4 -7
  35. data/vendor/pygments-main/pygments/filters/__init__.py +609 -22
  36. data/vendor/pygments-main/pygments/formatter.py +4 -5
  37. data/vendor/pygments-main/pygments/formatters/__init__.py +10 -10
  38. data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -4
  39. data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -2
  40. data/vendor/pygments-main/pygments/formatters/html.py +242 -139
  41. data/vendor/pygments-main/pygments/formatters/img.py +91 -50
  42. data/vendor/pygments-main/pygments/formatters/irc.py +39 -40
  43. data/vendor/pygments-main/pygments/formatters/latex.py +62 -33
  44. data/vendor/pygments-main/pygments/formatters/other.py +18 -17
  45. data/vendor/pygments-main/pygments/formatters/rtf.py +29 -30
  46. data/vendor/pygments-main/pygments/formatters/svg.py +40 -5
  47. data/vendor/pygments-main/pygments/formatters/terminal.py +25 -32
  48. data/vendor/pygments-main/pygments/formatters/terminal256.py +45 -14
  49. data/vendor/pygments-main/pygments/lexer.py +47 -44
  50. data/vendor/pygments-main/pygments/lexers/__init__.py +341 -0
  51. data/vendor/pygments-main/pygments/lexers/_asy_builtins.py +1644 -0
  52. data/vendor/pygments-main/pygments/lexers/_cl_builtins.py +231 -0
  53. data/vendor/pygments-main/pygments/lexers/_cocoa_builtins.py +75 -0
  54. data/vendor/pygments-main/pygments/lexers/_csound_builtins.py +1724 -0
  55. data/vendor/pygments-main/pygments/lexers/_lasso_builtins.py +5326 -0
  56. data/vendor/pygments-main/pygments/lexers/_lua_builtins.py +292 -0
  57. data/vendor/pygments-main/pygments/lexers/_mapping.py +554 -0
  58. data/vendor/pygments-main/pygments/lexers/_mql_builtins.py +1171 -0
  59. data/vendor/pygments-main/pygments/lexers/_mysql_builtins.py +1281 -0
  60. data/vendor/pygments-main/pygments/lexers/_openedge_builtins.py +2600 -0
  61. data/vendor/pygments-main/pygments/lexers/_php_builtins.py +4752 -0
  62. data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +677 -0
  63. data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +3093 -0
  64. data/vendor/pygments-main/pygments/lexers/_sourcemod_builtins.py +1160 -0
  65. data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +557 -0
  66. data/vendor/pygments-main/pygments/lexers/_stata_builtins.py +457 -0
  67. data/vendor/pygments-main/pygments/lexers/_tsql_builtins.py +1003 -0
  68. data/vendor/pygments-main/pygments/lexers/_usd_builtins.py +112 -0
  69. data/vendor/pygments-main/pygments/lexers/_vbscript_builtins.py +279 -0
  70. data/vendor/pygments-main/pygments/lexers/_vim_builtins.py +1938 -0
  71. data/vendor/pygments-main/pygments/lexers/actionscript.py +244 -0
  72. data/vendor/pygments-main/pygments/lexers/agile.py +23 -0
  73. data/vendor/pygments-main/pygments/lexers/algebra.py +239 -0
  74. data/vendor/pygments-main/pygments/lexers/ambient.py +75 -0
  75. data/vendor/pygments-main/pygments/lexers/amdgpu.py +48 -0
  76. data/vendor/pygments-main/pygments/lexers/ampl.py +86 -0
  77. data/vendor/pygments-main/pygments/lexers/apl.py +100 -0
  78. data/vendor/pygments-main/pygments/lexers/archetype.py +317 -0
  79. data/vendor/pygments-main/pygments/lexers/arrow.py +116 -0
  80. data/vendor/pygments-main/pygments/lexers/asm.py +1004 -0
  81. data/vendor/pygments-main/pygments/lexers/automation.py +379 -0
  82. data/vendor/pygments-main/pygments/lexers/bare.py +103 -0
  83. data/vendor/pygments-main/pygments/lexers/basic.py +661 -0
  84. data/vendor/pygments-main/pygments/lexers/bibtex.py +159 -0
  85. data/vendor/pygments-main/pygments/lexers/boa.py +101 -0
  86. data/vendor/pygments-main/pygments/lexers/business.py +626 -0
  87. data/vendor/pygments-main/pygments/lexers/c_cpp.py +342 -0
  88. data/vendor/pygments-main/pygments/lexers/c_like.py +565 -0
  89. data/vendor/pygments-main/pygments/lexers/capnproto.py +77 -0
  90. data/vendor/pygments-main/pygments/lexers/cddl.py +190 -0
  91. data/vendor/pygments-main/pygments/lexers/chapel.py +111 -0
  92. data/vendor/pygments-main/pygments/lexers/clean.py +178 -0
  93. data/vendor/pygments-main/pygments/lexers/compiled.py +33 -0
  94. data/vendor/pygments-main/pygments/lexers/configs.py +985 -0
  95. data/vendor/pygments-main/pygments/lexers/console.py +113 -0
  96. data/vendor/pygments-main/pygments/lexers/crystal.py +366 -0
  97. data/vendor/pygments-main/pygments/lexers/csound.py +466 -0
  98. data/vendor/pygments-main/pygments/lexers/css.py +694 -0
  99. data/vendor/pygments-main/pygments/lexers/d.py +255 -0
  100. data/vendor/pygments-main/pygments/lexers/dalvik.py +124 -0
  101. data/vendor/pygments-main/pygments/lexers/data.py +697 -0
  102. data/vendor/pygments-main/pygments/lexers/devicetree.py +108 -0
  103. data/vendor/pygments-main/pygments/lexers/diff.py +164 -0
  104. data/vendor/pygments-main/pygments/lexers/dotnet.py +706 -0
  105. data/vendor/pygments-main/pygments/lexers/dsls.py +959 -0
  106. data/vendor/pygments-main/pygments/lexers/dylan.py +286 -0
  107. data/vendor/pygments-main/pygments/lexers/ecl.py +137 -0
  108. data/vendor/pygments-main/pygments/lexers/eiffel.py +64 -0
  109. data/vendor/pygments-main/pygments/lexers/elm.py +120 -0
  110. data/vendor/pygments-main/pygments/lexers/email.py +150 -0
  111. data/vendor/pygments-main/pygments/lexers/erlang.py +529 -0
  112. data/vendor/pygments-main/pygments/lexers/esoteric.py +303 -0
  113. data/vendor/pygments-main/pygments/lexers/ezhil.py +76 -0
  114. data/vendor/pygments-main/pygments/lexers/factor.py +343 -0
  115. data/vendor/pygments-main/pygments/lexers/fantom.py +249 -0
  116. data/vendor/pygments-main/pygments/lexers/felix.py +272 -0
  117. data/vendor/pygments-main/pygments/lexers/floscript.py +82 -0
  118. data/vendor/pygments-main/pygments/lexers/forth.py +177 -0
  119. data/vendor/pygments-main/pygments/lexers/fortran.py +208 -0
  120. data/vendor/pygments-main/pygments/lexers/foxpro.py +427 -0
  121. data/vendor/pygments-main/pygments/lexers/freefem.py +897 -0
  122. data/vendor/pygments-main/pygments/lexers/functional.py +20 -0
  123. data/vendor/pygments-main/pygments/lexers/futhark.py +110 -0
  124. data/vendor/pygments-main/pygments/lexers/gdscript.py +345 -0
  125. data/vendor/pygments-main/pygments/lexers/go.py +100 -0
  126. data/vendor/pygments-main/pygments/lexers/grammar_notation.py +269 -0
  127. data/vendor/pygments-main/pygments/lexers/graph.py +84 -0
  128. data/vendor/pygments-main/pygments/lexers/graphics.py +799 -0
  129. data/vendor/pygments-main/pygments/lexers/graphviz.py +57 -0
  130. data/vendor/pygments-main/pygments/lexers/haskell.py +869 -0
  131. data/vendor/pygments-main/pygments/lexers/haxe.py +935 -0
  132. data/vendor/pygments-main/pygments/lexers/hdl.py +468 -0
  133. data/vendor/pygments-main/pygments/lexers/hexdump.py +102 -0
  134. data/vendor/pygments-main/pygments/lexers/html.py +613 -0
  135. data/vendor/pygments-main/pygments/lexers/idl.py +280 -0
  136. data/vendor/pygments-main/pygments/lexers/igor.py +419 -0
  137. data/vendor/pygments-main/pygments/lexers/inferno.py +95 -0
  138. data/vendor/pygments-main/pygments/lexers/installers.py +321 -0
  139. data/vendor/pygments-main/pygments/lexers/int_fiction.py +1367 -0
  140. data/vendor/pygments-main/pygments/lexers/iolang.py +62 -0
  141. data/vendor/pygments-main/pygments/lexers/j.py +145 -0
  142. data/vendor/pygments-main/pygments/lexers/javascript.py +1536 -0
  143. data/vendor/pygments-main/pygments/lexers/julia.py +330 -0
  144. data/vendor/pygments-main/pygments/lexers/jvm.py +1739 -0
  145. data/vendor/pygments-main/pygments/lexers/lisp.py +2698 -0
  146. data/vendor/pygments-main/pygments/lexers/make.py +205 -0
  147. data/vendor/pygments-main/pygments/lexers/markup.py +762 -0
  148. data/vendor/pygments-main/pygments/lexers/math.py +20 -0
  149. data/vendor/pygments-main/pygments/lexers/matlab.py +3280 -0
  150. data/vendor/pygments-main/pygments/lexers/mime.py +225 -0
  151. data/vendor/pygments-main/pygments/lexers/ml.py +957 -0
  152. data/vendor/pygments-main/pygments/lexers/modeling.py +365 -0
  153. data/vendor/pygments-main/pygments/lexers/modula2.py +1579 -0
  154. data/vendor/pygments-main/pygments/lexers/monte.py +203 -0
  155. data/vendor/pygments-main/pygments/lexers/mosel.py +447 -0
  156. data/vendor/pygments-main/pygments/lexers/ncl.py +893 -0
  157. data/vendor/pygments-main/pygments/lexers/nimrod.py +158 -0
  158. data/vendor/pygments-main/pygments/lexers/nit.py +63 -0
  159. data/vendor/pygments-main/pygments/lexers/nix.py +135 -0
  160. data/vendor/pygments-main/pygments/lexers/oberon.py +120 -0
  161. data/vendor/pygments-main/pygments/lexers/objective.py +503 -0
  162. data/vendor/pygments-main/pygments/lexers/ooc.py +84 -0
  163. data/vendor/pygments-main/pygments/lexers/other.py +40 -0
  164. data/vendor/pygments-main/pygments/lexers/parasail.py +78 -0
  165. data/vendor/pygments-main/pygments/lexers/parsers.py +799 -0
  166. data/vendor/pygments-main/pygments/lexers/pascal.py +643 -0
  167. data/vendor/pygments-main/pygments/lexers/pawn.py +202 -0
  168. data/vendor/pygments-main/pygments/lexers/perl.py +731 -0
  169. data/vendor/pygments-main/pygments/lexers/php.py +320 -0
  170. data/vendor/pygments-main/pygments/lexers/pointless.py +70 -0
  171. data/vendor/pygments-main/pygments/lexers/pony.py +93 -0
  172. data/vendor/pygments-main/pygments/lexers/praat.py +301 -0
  173. data/vendor/pygments-main/pygments/lexers/prolog.py +305 -0
  174. data/vendor/pygments-main/pygments/lexers/promql.py +182 -0
  175. data/vendor/pygments-main/pygments/lexers/python.py +1158 -0
  176. data/vendor/pygments-main/pygments/lexers/qvt.py +151 -0
  177. data/vendor/pygments-main/pygments/lexers/r.py +190 -0
  178. data/vendor/pygments-main/pygments/lexers/rdf.py +462 -0
  179. data/vendor/pygments-main/pygments/lexers/rebol.py +430 -0
  180. data/vendor/pygments-main/pygments/lexers/resource.py +84 -0
  181. data/vendor/pygments-main/pygments/lexers/ride.py +138 -0
  182. data/vendor/pygments-main/pygments/lexers/rnc.py +66 -0
  183. data/vendor/pygments-main/pygments/lexers/roboconf.py +81 -0
  184. data/vendor/pygments-main/pygments/lexers/robotframework.py +551 -0
  185. data/vendor/pygments-main/pygments/lexers/ruby.py +523 -0
  186. data/vendor/pygments-main/pygments/lexers/rust.py +223 -0
  187. data/vendor/pygments-main/pygments/lexers/sas.py +227 -0
  188. data/vendor/pygments-main/pygments/lexers/scdoc.py +82 -0
  189. data/vendor/pygments-main/pygments/lexers/scripting.py +1283 -0
  190. data/vendor/pygments-main/pygments/lexers/sgf.py +60 -0
  191. data/vendor/pygments-main/pygments/lexers/shell.py +909 -0
  192. data/vendor/pygments-main/pygments/lexers/sieve.py +68 -0
  193. data/vendor/pygments-main/pygments/lexers/slash.py +184 -0
  194. data/vendor/pygments-main/pygments/lexers/smalltalk.py +194 -0
  195. data/vendor/pygments-main/pygments/lexers/smv.py +78 -0
  196. data/vendor/pygments-main/pygments/lexers/snobol.py +82 -0
  197. data/vendor/pygments-main/pygments/lexers/solidity.py +91 -0
  198. data/vendor/pygments-main/pygments/lexers/special.py +106 -0
  199. data/vendor/pygments-main/pygments/lexers/sql.py +832 -0
  200. data/vendor/pygments-main/pygments/lexers/stata.py +170 -0
  201. data/vendor/pygments-main/pygments/lexers/supercollider.py +94 -0
  202. data/vendor/pygments-main/pygments/lexers/tcl.py +144 -0
  203. data/vendor/pygments-main/pygments/lexers/templates.py +2263 -0
  204. data/vendor/pygments-main/pygments/lexers/teraterm.py +334 -0
  205. data/vendor/pygments-main/pygments/lexers/testing.py +206 -0
  206. data/vendor/pygments-main/pygments/lexers/text.py +25 -0
  207. data/vendor/pygments-main/pygments/lexers/textedit.py +168 -0
  208. data/vendor/pygments-main/pygments/lexers/textfmts.py +429 -0
  209. data/vendor/pygments-main/pygments/lexers/theorem.py +476 -0
  210. data/vendor/pygments-main/pygments/lexers/tnt.py +262 -0
  211. data/vendor/pygments-main/pygments/lexers/trafficscript.py +53 -0
  212. data/vendor/pygments-main/pygments/lexers/typoscript.py +218 -0
  213. data/vendor/pygments-main/pygments/lexers/unicon.py +411 -0
  214. data/vendor/pygments-main/pygments/lexers/urbi.py +145 -0
  215. data/vendor/pygments-main/pygments/lexers/usd.py +89 -0
  216. data/vendor/pygments-main/pygments/lexers/varnish.py +189 -0
  217. data/vendor/pygments-main/pygments/lexers/verification.py +113 -0
  218. data/vendor/pygments-main/pygments/lexers/web.py +23 -0
  219. data/vendor/pygments-main/pygments/lexers/webidl.py +298 -0
  220. data/vendor/pygments-main/pygments/lexers/webmisc.py +991 -0
  221. data/vendor/pygments-main/pygments/lexers/whiley.py +115 -0
  222. data/vendor/pygments-main/pygments/lexers/x10.py +68 -0
  223. data/vendor/pygments-main/pygments/lexers/xorg.py +36 -0
  224. data/vendor/pygments-main/pygments/lexers/yang.py +103 -0
  225. data/vendor/pygments-main/pygments/lexers/zig.py +123 -0
  226. data/vendor/pygments-main/pygments/modeline.py +1 -2
  227. data/vendor/pygments-main/pygments/plugin.py +4 -3
  228. data/vendor/pygments-main/pygments/regexopt.py +1 -2
  229. data/vendor/pygments-main/pygments/scanner.py +2 -3
  230. data/vendor/pygments-main/pygments/sphinxext.py +2 -5
  231. data/vendor/pygments-main/pygments/style.py +61 -25
  232. data/vendor/pygments-main/pygments/styles/__init__.py +12 -5
  233. data/vendor/pygments-main/pygments/styles/abap.py +1 -2
  234. data/vendor/pygments-main/pygments/styles/algol.py +1 -2
  235. data/vendor/pygments-main/pygments/styles/algol_nu.py +1 -2
  236. data/vendor/pygments-main/pygments/styles/arduino.py +2 -3
  237. data/vendor/pygments-main/pygments/styles/autumn.py +1 -2
  238. data/vendor/pygments-main/pygments/styles/borland.py +1 -2
  239. data/vendor/pygments-main/pygments/styles/bw.py +1 -2
  240. data/vendor/pygments-main/pygments/styles/colorful.py +1 -2
  241. data/vendor/pygments-main/pygments/styles/default.py +1 -2
  242. data/vendor/pygments-main/pygments/styles/emacs.py +1 -2
  243. data/vendor/pygments-main/pygments/styles/friendly.py +1 -2
  244. data/vendor/pygments-main/pygments/styles/fruity.py +1 -2
  245. data/vendor/pygments-main/pygments/styles/igor.py +1 -2
  246. data/vendor/pygments-main/pygments/styles/inkpot.py +66 -0
  247. data/vendor/pygments-main/pygments/styles/lovelace.py +1 -2
  248. data/vendor/pygments-main/pygments/styles/manni.py +1 -2
  249. data/vendor/pygments-main/pygments/styles/material.py +118 -0
  250. data/vendor/pygments-main/pygments/styles/monokai.py +4 -4
  251. data/vendor/pygments-main/pygments/styles/murphy.py +1 -2
  252. data/vendor/pygments-main/pygments/styles/native.py +1 -2
  253. data/vendor/pygments-main/pygments/styles/paraiso_dark.py +1 -2
  254. data/vendor/pygments-main/pygments/styles/paraiso_light.py +1 -2
  255. data/vendor/pygments-main/pygments/styles/pastie.py +1 -2
  256. data/vendor/pygments-main/pygments/styles/perldoc.py +1 -2
  257. data/vendor/pygments-main/pygments/styles/rainbow_dash.py +1 -2
  258. data/vendor/pygments-main/pygments/styles/rrt.py +1 -2
  259. data/vendor/pygments-main/pygments/styles/sas.py +1 -2
  260. data/vendor/pygments-main/pygments/styles/solarized.py +136 -0
  261. data/vendor/pygments-main/pygments/styles/stata_dark.py +40 -0
  262. data/vendor/pygments-main/pygments/styles/{stata.py → stata_light.py} +14 -16
  263. data/vendor/pygments-main/pygments/styles/tango.py +1 -2
  264. data/vendor/pygments-main/pygments/styles/trac.py +1 -2
  265. data/vendor/pygments-main/pygments/styles/vim.py +1 -2
  266. data/vendor/pygments-main/pygments/styles/vs.py +1 -2
  267. data/vendor/pygments-main/pygments/styles/xcode.py +1 -2
  268. data/vendor/pygments-main/pygments/styles/zenburn.py +80 -0
  269. data/vendor/pygments-main/pygments/token.py +1 -2
  270. data/vendor/pygments-main/pygments/unistring.py +47 -109
  271. data/vendor/pygments-main/pygments/util.py +16 -96
  272. metadata +81 -139
  273. data/CHANGELOG.md +0 -111
  274. data/README.md +0 -121
  275. data/cache-lexers.rb +0 -8
  276. data/circle.yml +0 -20
  277. data/test/test_data.c +0 -2581
  278. data/test/test_data.py +0 -514
  279. data/test/test_data_generated +0 -2582
  280. data/vendor/custom_lexers/github.py +0 -565
  281. data/vendor/pygments-main/CHANGES +0 -1186
  282. data/vendor/pygments-main/MANIFEST.in +0 -6
  283. data/vendor/pygments-main/Makefile +0 -65
  284. data/vendor/pygments-main/README.rst +0 -39
  285. data/vendor/pygments-main/REVISION +0 -1
  286. data/vendor/pygments-main/TODO +0 -12
  287. data/vendor/pygments-main/doc/Makefile +0 -153
  288. data/vendor/pygments-main/doc/_static/favicon.ico +0 -0
  289. data/vendor/pygments-main/doc/_static/logo_new.png +0 -0
  290. data/vendor/pygments-main/doc/_static/logo_only.png +0 -0
  291. data/vendor/pygments-main/doc/_templates/docssidebar.html +0 -3
  292. data/vendor/pygments-main/doc/_templates/indexsidebar.html +0 -25
  293. data/vendor/pygments-main/doc/_themes/pygments14/layout.html +0 -98
  294. data/vendor/pygments-main/doc/_themes/pygments14/static/bodybg.png +0 -0
  295. data/vendor/pygments-main/doc/_themes/pygments14/static/docbg.png +0 -0
  296. data/vendor/pygments-main/doc/_themes/pygments14/static/listitem.png +0 -0
  297. data/vendor/pygments-main/doc/_themes/pygments14/static/logo.png +0 -0
  298. data/vendor/pygments-main/doc/_themes/pygments14/static/pocoo.png +0 -0
  299. data/vendor/pygments-main/doc/_themes/pygments14/static/pygments14.css_t +0 -401
  300. data/vendor/pygments-main/doc/_themes/pygments14/theme.conf +0 -15
  301. data/vendor/pygments-main/doc/conf.py +0 -241
  302. data/vendor/pygments-main/doc/docs/api.rst +0 -354
  303. data/vendor/pygments-main/doc/docs/authors.rst +0 -4
  304. data/vendor/pygments-main/doc/docs/changelog.rst +0 -1
  305. data/vendor/pygments-main/doc/docs/cmdline.rst +0 -166
  306. data/vendor/pygments-main/doc/docs/filterdevelopment.rst +0 -71
  307. data/vendor/pygments-main/doc/docs/filters.rst +0 -41
  308. data/vendor/pygments-main/doc/docs/formatterdevelopment.rst +0 -169
  309. data/vendor/pygments-main/doc/docs/formatters.rst +0 -48
  310. data/vendor/pygments-main/doc/docs/index.rst +0 -66
  311. data/vendor/pygments-main/doc/docs/integrate.rst +0 -40
  312. data/vendor/pygments-main/doc/docs/java.rst +0 -70
  313. data/vendor/pygments-main/doc/docs/lexerdevelopment.rst +0 -728
  314. data/vendor/pygments-main/doc/docs/lexers.rst +0 -69
  315. data/vendor/pygments-main/doc/docs/moinmoin.rst +0 -39
  316. data/vendor/pygments-main/doc/docs/plugins.rst +0 -93
  317. data/vendor/pygments-main/doc/docs/quickstart.rst +0 -205
  318. data/vendor/pygments-main/doc/docs/rstdirective.rst +0 -22
  319. data/vendor/pygments-main/doc/docs/styles.rst +0 -201
  320. data/vendor/pygments-main/doc/docs/tokens.rst +0 -372
  321. data/vendor/pygments-main/doc/docs/unicode.rst +0 -58
  322. data/vendor/pygments-main/doc/download.rst +0 -41
  323. data/vendor/pygments-main/doc/faq.rst +0 -139
  324. data/vendor/pygments-main/doc/index.rst +0 -54
  325. data/vendor/pygments-main/doc/languages.rst +0 -154
  326. data/vendor/pygments-main/doc/make.bat +0 -190
  327. data/vendor/pygments-main/doc/pygmentize.1 +0 -94
  328. data/vendor/pygments-main/external/autopygmentize +0 -101
  329. data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +0 -162
  330. data/vendor/pygments-main/external/markdown-processor.py +0 -67
  331. data/vendor/pygments-main/external/moin-parser.py +0 -112
  332. data/vendor/pygments-main/external/pygments.bashcomp +0 -38
  333. data/vendor/pygments-main/external/rst-directive.py +0 -82
  334. data/vendor/pygments-main/pygmentize +0 -8
  335. data/vendor/pygments-main/requirements.txt +0 -5
  336. data/vendor/pygments-main/scripts/check_sources.py +0 -211
  337. data/vendor/pygments-main/scripts/debug_lexer.py +0 -246
  338. data/vendor/pygments-main/scripts/detect_missing_analyse_text.py +0 -33
  339. data/vendor/pygments-main/scripts/epydoc.css +0 -280
  340. data/vendor/pygments-main/scripts/get_vimkw.py +0 -74
  341. data/vendor/pygments-main/scripts/pylintrc +0 -301
  342. data/vendor/pygments-main/scripts/vim2pygments.py +0 -935
  343. data/vendor/pygments-main/setup.cfg +0 -10
  344. data/vendor/pygments-main/setup.py +0 -77
  345. data/vendor/pygments-main/tox.ini +0 -7
  346. data/vendor/simplejson/.gitignore +0 -10
  347. data/vendor/simplejson/.travis.yml +0 -5
  348. data/vendor/simplejson/CHANGES.txt +0 -291
  349. data/vendor/simplejson/LICENSE.txt +0 -19
  350. data/vendor/simplejson/MANIFEST.in +0 -5
  351. data/vendor/simplejson/README.rst +0 -19
  352. data/vendor/simplejson/conf.py +0 -179
  353. data/vendor/simplejson/index.rst +0 -628
  354. data/vendor/simplejson/scripts/make_docs.py +0 -18
  355. data/vendor/simplejson/setup.py +0 -104
  356. data/vendor/simplejson/simplejson/__init__.py +0 -510
  357. data/vendor/simplejson/simplejson/_speedups.c +0 -2745
  358. data/vendor/simplejson/simplejson/decoder.py +0 -425
  359. data/vendor/simplejson/simplejson/encoder.py +0 -567
  360. data/vendor/simplejson/simplejson/ordered_dict.py +0 -119
  361. data/vendor/simplejson/simplejson/scanner.py +0 -77
  362. data/vendor/simplejson/simplejson/tests/__init__.py +0 -67
  363. data/vendor/simplejson/simplejson/tests/test_bigint_as_string.py +0 -55
  364. data/vendor/simplejson/simplejson/tests/test_check_circular.py +0 -30
  365. data/vendor/simplejson/simplejson/tests/test_decimal.py +0 -66
  366. data/vendor/simplejson/simplejson/tests/test_decode.py +0 -83
  367. data/vendor/simplejson/simplejson/tests/test_default.py +0 -9
  368. data/vendor/simplejson/simplejson/tests/test_dump.py +0 -67
  369. data/vendor/simplejson/simplejson/tests/test_encode_basestring_ascii.py +0 -46
  370. data/vendor/simplejson/simplejson/tests/test_encode_for_html.py +0 -32
  371. data/vendor/simplejson/simplejson/tests/test_errors.py +0 -34
  372. data/vendor/simplejson/simplejson/tests/test_fail.py +0 -91
  373. data/vendor/simplejson/simplejson/tests/test_float.py +0 -19
  374. data/vendor/simplejson/simplejson/tests/test_indent.py +0 -86
  375. data/vendor/simplejson/simplejson/tests/test_item_sort_key.py +0 -20
  376. data/vendor/simplejson/simplejson/tests/test_namedtuple.py +0 -121
  377. data/vendor/simplejson/simplejson/tests/test_pass1.py +0 -76
  378. data/vendor/simplejson/simplejson/tests/test_pass2.py +0 -14
  379. data/vendor/simplejson/simplejson/tests/test_pass3.py +0 -20
  380. data/vendor/simplejson/simplejson/tests/test_recursion.py +0 -67
  381. data/vendor/simplejson/simplejson/tests/test_scanstring.py +0 -117
  382. data/vendor/simplejson/simplejson/tests/test_separators.py +0 -42
  383. data/vendor/simplejson/simplejson/tests/test_speedups.py +0 -20
  384. data/vendor/simplejson/simplejson/tests/test_tuple.py +0 -49
  385. data/vendor/simplejson/simplejson/tests/test_unicode.py +0 -109
  386. data/vendor/simplejson/simplejson/tool.py +0 -39
@@ -0,0 +1,170 @@
1
+ """
2
+ pygments.lexers.stata
3
+ ~~~~~~~~~~~~~~~~~~~~~
4
+
5
+ Lexer for Stata
6
+
7
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
8
+ :license: BSD, see LICENSE for details.
9
+ """
10
+
11
+ import re
12
+ from pygments.lexer import RegexLexer, default, include, words
13
+ from pygments.token import Comment, Keyword, Name, Number, \
14
+ String, Text, Operator
15
+
16
+ from pygments.lexers._stata_builtins import builtins_base, builtins_functions
17
+
18
+ __all__ = ['StataLexer']
19
+
20
+
21
+ class StataLexer(RegexLexer):
22
+ """
23
+ For `Stata <http://www.stata.com/>`_ do files.
24
+
25
+ .. versionadded:: 2.2
26
+ """
27
+ # Syntax based on
28
+ # - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
29
+ # - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
30
+ # - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
31
+
32
+ name = 'Stata'
33
+ aliases = ['stata', 'do']
34
+ filenames = ['*.do', '*.ado']
35
+ mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
36
+ flags = re.MULTILINE | re.DOTALL
37
+
38
+ tokens = {
39
+ 'root': [
40
+ include('comments'),
41
+ include('strings'),
42
+ include('macros'),
43
+ include('numbers'),
44
+ include('keywords'),
45
+ include('operators'),
46
+ include('format'),
47
+ (r'.', Text),
48
+ ],
49
+ # Comments are a complicated beast in Stata because they can be
50
+ # nested and there are a few corner cases with that. See:
51
+ # - github.com/kylebarron/language-stata/issues/90
52
+ # - statalist.org/forums/forum/general-stata-discussion/general/1448244
53
+ 'comments': [
54
+ (r'(^//|(?<=\s)//)(?!/)', Comment.Single, 'comments-double-slash'),
55
+ (r'^\s*\*', Comment.Single, 'comments-star'),
56
+ (r'/\*', Comment.Multiline, 'comments-block'),
57
+ (r'(^///|(?<=\s)///)', Comment.Special, 'comments-triple-slash')
58
+ ],
59
+ 'comments-block': [
60
+ (r'/\*', Comment.Multiline, '#push'),
61
+ # this ends and restarts a comment block. but need to catch this so
62
+ # that it doesn\'t start _another_ level of comment blocks
63
+ (r'\*/\*', Comment.Multiline),
64
+ (r'(\*/\s+\*(?!/)[^\n]*)|(\*/)', Comment.Multiline, '#pop'),
65
+ # Match anything else as a character inside the comment
66
+ (r'.', Comment.Multiline),
67
+ ],
68
+ 'comments-star': [
69
+ (r'///.*?\n', Comment.Single,
70
+ ('#pop', 'comments-triple-slash')),
71
+ (r'(^//|(?<=\s)//)(?!/)', Comment.Single,
72
+ ('#pop', 'comments-double-slash')),
73
+ (r'/\*', Comment.Multiline, 'comments-block'),
74
+ (r'.(?=\n)', Comment.Single, '#pop'),
75
+ (r'.', Comment.Single),
76
+ ],
77
+ 'comments-triple-slash': [
78
+ (r'\n', Comment.Special, '#pop'),
79
+ # A // breaks out of a comment for the rest of the line
80
+ (r'//.*?(?=\n)', Comment.Single, '#pop'),
81
+ (r'.', Comment.Special),
82
+ ],
83
+ 'comments-double-slash': [
84
+ (r'\n', Text, '#pop'),
85
+ (r'.', Comment.Single),
86
+ ],
87
+ # `"compound string"' and regular "string"; note the former are
88
+ # nested.
89
+ 'strings': [
90
+ (r'`"', String, 'string-compound'),
91
+ (r'(?<!`)"', String, 'string-regular'),
92
+ ],
93
+ 'string-compound': [
94
+ (r'`"', String, '#push'),
95
+ (r'"\'', String, '#pop'),
96
+ (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
97
+ include('macros'),
98
+ (r'.', String)
99
+ ],
100
+ 'string-regular': [
101
+ (r'(")(?!\')|(?=\n)', String, '#pop'),
102
+ (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
103
+ include('macros'),
104
+ (r'.', String)
105
+ ],
106
+ # A local is usually
107
+ # `\w{0,31}'
108
+ # `:extended macro'
109
+ # `=expression'
110
+ # `[rsen](results)'
111
+ # `(++--)scalar(++--)'
112
+ #
113
+ # However, there are all sorts of weird rules wrt edge
114
+ # cases. Instead of writing 27 exceptions, anything inside
115
+ # `' is a local.
116
+ #
117
+ # A global is more restricted, so we do follow rules. Note only
118
+ # locals explicitly enclosed ${} can be nested.
119
+ 'macros': [
120
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
121
+ (r'\$', Name.Variable.Global, 'macro-global-name'),
122
+ (r'`', Name.Variable, 'macro-local'),
123
+ ],
124
+ 'macro-local': [
125
+ (r'`', Name.Variable, '#push'),
126
+ (r"'", Name.Variable, '#pop'),
127
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
128
+ (r'\$', Name.Variable.Global, 'macro-global-name'),
129
+ (r'.', Name.Variable), # fallback
130
+ ],
131
+ 'macro-global-nested': [
132
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
133
+ (r'\}', Name.Variable.Global, '#pop'),
134
+ (r'\$', Name.Variable.Global, 'macro-global-name'),
135
+ (r'`', Name.Variable, 'macro-local'),
136
+ (r'\w', Name.Variable.Global), # fallback
137
+ default('#pop'),
138
+ ],
139
+ 'macro-global-name': [
140
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
141
+ (r'\$', Name.Variable.Global, 'macro-global-name', '#pop'),
142
+ (r'`', Name.Variable, 'macro-local', '#pop'),
143
+ (r'\w{1,32}', Name.Variable.Global, '#pop'),
144
+ ],
145
+ # Built in functions and statements
146
+ 'keywords': [
147
+ (words(builtins_functions, prefix = r'\b', suffix = r'(?=\()'),
148
+ Name.Function),
149
+ (words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
150
+ Keyword),
151
+ ],
152
+ # http://www.stata.com/help.cgi?operators
153
+ 'operators': [
154
+ (r'-|==|<=|>=|<|>|&|!=', Operator),
155
+ (r'\*|\+|\^|/|!|~|==|~=', Operator)
156
+ ],
157
+ # Stata numbers
158
+ 'numbers': [
159
+ # decimal number
160
+ (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
161
+ Number),
162
+ ],
163
+ # Stata formats
164
+ 'format': [
165
+ (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Other),
166
+ (r'%(21x|16H|16L|8H|8L)', Name.Other),
167
+ (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg)\S{0,32}', Name.Other),
168
+ (r'%[-~]?\d{1,4}s', Name.Other),
169
+ ]
170
+ }
@@ -0,0 +1,94 @@
1
+ """
2
+ pygments.lexers.supercollider
3
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4
+
5
+ Lexer for SuperCollider
6
+
7
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
8
+ :license: BSD, see LICENSE for details.
9
+ """
10
+
11
+ import re
12
+
13
+ from pygments.lexer import RegexLexer, include, words, default
14
+ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
15
+ Number, Punctuation
16
+
17
+ __all__ = ['SuperColliderLexer']
18
+
19
+
20
+ class SuperColliderLexer(RegexLexer):
21
+ """
22
+ For `SuperCollider <http://supercollider.github.io/>`_ source code.
23
+
24
+ .. versionadded:: 2.1
25
+ """
26
+
27
+ name = 'SuperCollider'
28
+ aliases = ['sc', 'supercollider']
29
+ filenames = ['*.sc', '*.scd']
30
+ mimetypes = ['application/supercollider', 'text/supercollider', ]
31
+
32
+ flags = re.DOTALL | re.MULTILINE
33
+ tokens = {
34
+ 'commentsandwhitespace': [
35
+ (r'\s+', Text),
36
+ (r'<!--', Comment),
37
+ (r'//.*?\n', Comment.Single),
38
+ (r'/\*.*?\*/', Comment.Multiline)
39
+ ],
40
+ 'slashstartsregex': [
41
+ include('commentsandwhitespace'),
42
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
43
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
44
+ (r'(?=/)', Text, ('#pop', 'badregex')),
45
+ default('#pop'),
46
+ ],
47
+ 'badregex': [
48
+ (r'\n', Text, '#pop')
49
+ ],
50
+ 'root': [
51
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
52
+ include('commentsandwhitespace'),
53
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
54
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
55
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
56
+ (r'[})\].]', Punctuation),
57
+ (words((
58
+ 'for', 'in', 'while', 'do', 'break', 'return', 'continue',
59
+ 'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
60
+ 'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
61
+ 'void'), suffix=r'\b'),
62
+ Keyword, 'slashstartsregex'),
63
+ (words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
64
+ Keyword.Declaration, 'slashstartsregex'),
65
+ (words((
66
+ '(abstract', 'boolean', 'byte', 'char', 'class', 'const',
67
+ 'debugger', 'double', 'enum', 'export', 'extends', 'final',
68
+ 'float', 'goto', 'implements', 'import', 'int', 'interface',
69
+ 'long', 'native', 'package', 'private', 'protected', 'public',
70
+ 'short', 'static', 'super', 'synchronized', 'throws',
71
+ 'transient', 'volatile'), suffix=r'\b'),
72
+ Keyword.Reserved),
73
+ (words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
74
+ (words((
75
+ 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
76
+ 'Object', 'Packages', 'RegExp', 'String',
77
+ 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
78
+ 'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
79
+ 'thisThread', 'this'), suffix=r'\b'),
80
+ Name.Builtin),
81
+ (r'[$a-zA-Z_]\w*', Name.Other),
82
+ (r'\\?[$a-zA-Z_]\w*', String.Symbol),
83
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
84
+ (r'0x[0-9a-fA-F]+', Number.Hex),
85
+ (r'[0-9]+', Number.Integer),
86
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
87
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
88
+ ]
89
+ }
90
+
91
+ def analyse_text(text):
92
+ """We're searching for a common function and a unique keyword here."""
93
+ if 'SinOsc' in text or 'thisFunctionDef' in text:
94
+ return 0.1
@@ -0,0 +1,144 @@
1
+ """
2
+ pygments.lexers.tcl
3
+ ~~~~~~~~~~~~~~~~~~~
4
+
5
+ Lexers for Tcl and related languages.
6
+
7
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
8
+ :license: BSD, see LICENSE for details.
9
+ """
10
+
11
+ from pygments.lexer import RegexLexer, include, words
12
+ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
13
+ Number
14
+ from pygments.util import shebang_matches
15
+
16
+ __all__ = ['TclLexer']
17
+
18
+
19
class TclLexer(RegexLexer):
    """
    For Tcl source code.

    .. versionadded:: 0.10
    """

    # Control-flow / language-structure commands, highlighted as keywords.
    keyword_cmds_re = words((
        'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
        'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
        'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
        'vwait', 'while'), prefix=r'\b', suffix=r'\b')

    # Library commands, highlighted as builtins.
    builtin_cmds_re = words((
        'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
        'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
        'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
        'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
        'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
        'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
        'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
        'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
        'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')

    name = 'Tcl'
    aliases = ['tcl']
    filenames = ['*.tcl', '*.rvt']
    mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']

    def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
        # Helper executed at class-definition time (note: no ``self``); it
        # builds the rule list of one 'command...' state, parameterized by
        # the bracketing context ("", "-in-brace", "-in-bracket", "-in-paren").
        return [
            (keyword_cmds_re, Keyword, 'params' + context),
            (builtin_cmds_re, Name.Builtin, 'params' + context),
            (r'([\w.-]+)', Name.Variable, 'params' + context),
            (r'#', Comment, 'comment'),
        ]

    tokens = {
        'root': [
            include('command'),
            include('basic'),
            include('data'),
            (r'\}', Keyword),  # HACK: somehow we miscounted our braces
        ],
        'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
        'command-in-brace': _gen_command_rules(keyword_cmds_re,
                                               builtin_cmds_re,
                                               "-in-brace"),
        'command-in-bracket': _gen_command_rules(keyword_cmds_re,
                                                 builtin_cmds_re,
                                                 "-in-bracket"),
        'command-in-paren': _gen_command_rules(keyword_cmds_re,
                                               builtin_cmds_re,
                                               "-in-paren"),
        'basic': [
            # Opening brackets push the matching grouping state.
            (r'\(', Keyword, 'paren'),
            (r'\[', Keyword, 'bracket'),
            (r'\{', Keyword, 'brace'),
            (r'"', String.Double, 'string'),
            (r'(eq|ne|in|ni)\b', Operator.Word),
            (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
        ],
        'data': [
            (r'\s+', Text),
            (r'0x[a-fA-F0-9]+', Number.Hex),
            (r'0[0-7]+', Number.Oct),
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer),
            (r'\$([\w.:-]+)', Name.Variable),
            (r'([\w.:-]+)', Text),
        ],
        'params': [
            # ';' or newline terminates the current command.
            (r';', Keyword, '#pop'),
            (r'\n', Text, '#pop'),
            (r'(else|elseif|then)\b', Keyword),
            include('basic'),
            include('data'),
        ],
        # The 'params-in-*' states pop twice on the closing bracket:
        # once out of the params state and once out of the enclosing
        # command state.
        'params-in-brace': [
            (r'\}', Keyword, ('#pop', '#pop')),
            include('params')
        ],
        'params-in-paren': [
            (r'\)', Keyword, ('#pop', '#pop')),
            include('params')
        ],
        'params-in-bracket': [
            (r'\]', Keyword, ('#pop', '#pop')),
            include('params')
        ],
        'string': [
            (r'\[', String.Double, 'string-square'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
            (r'"', String.Double, '#pop')
        ],
        'string-square': [
            (r'\[', String.Double, 'string-square'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
            (r'\]', String.Double, '#pop')
        ],
        'brace': [
            (r'\}', Keyword, '#pop'),
            include('command-in-brace'),
            include('basic'),
            include('data'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('command-in-paren'),
            include('basic'),
            include('data'),
        ],
        'bracket': [
            (r'\]', Keyword, '#pop'),
            include('command-in-bracket'),
            include('basic'),
            include('data'),
        ],
        'comment': [
            # A comment line ends at an unescaped newline ...
            (r'.*[^\\]\n', Comment, '#pop'),
            # ... while a backslash continuation keeps it open.
            (r'.*\\\n', Comment),
        ],
    }

    def analyse_text(text):
        # A tcl(sh)/wish shebang is a strong hint.
        return shebang_matches(text, r'(tcl)')
@@ -0,0 +1,2263 @@
1
+ """
2
+ pygments.lexers.templates
3
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
4
+
5
+ Lexers for various template engines' markup.
6
+
7
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
8
+ :license: BSD, see LICENSE for details.
9
+ """
10
+
11
+ import re
12
+
13
+ from pygments.lexers.html import HtmlLexer, XmlLexer
14
+ from pygments.lexers.javascript import JavascriptLexer, LassoLexer
15
+ from pygments.lexers.css import CssLexer
16
+ from pygments.lexers.php import PhpLexer
17
+ from pygments.lexers.python import PythonLexer
18
+ from pygments.lexers.perl import PerlLexer
19
+ from pygments.lexers.jvm import JavaLexer, TeaLangLexer
20
+ from pygments.lexers.data import YamlLexer
21
+ from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
22
+ include, using, this, default, combined
23
+ from pygments.token import Error, Punctuation, Whitespace, \
24
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
25
+ from pygments.util import html_doctype_matches, looks_like_xml
26
+
27
+ __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
28
+ 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
29
+ 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
30
+ 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
31
+ 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
32
+ 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
33
+ 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
34
+ 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
35
+ 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
36
+ 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
37
+ 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
38
+ 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
39
+ 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
40
+ 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
41
+ 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
42
+ 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
43
+ 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
44
+ 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
45
+ 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
46
+ 'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']
47
+
48
+
49
class ErbLexer(Lexer):
    """
    Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
    lexer.

    Just highlights ruby code between the preprocessor directives, other data
    is left untouched by the lexer.

    All options are also forwarded to the `RubyLexer`.
    """

    name = 'ERB'
    aliases = ['erb']
    mimetypes = ['application/x-ruby-templating']

    # Splits the input on every ERB delimiter; longest alternatives first
    # so e.g. '<%%' wins over '<%'.
    _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)

    def __init__(self, **options):
        # Imported lazily to avoid a circular import at module load time.
        from pygments.lexers.ruby import RubyLexer
        self.ruby_lexer = RubyLexer(**options)
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """
        Since ERB doesn't allow "<%" and other tags inside of ruby
        blocks we have to use a split approach here that fails for
        that too.
        """
        # Pieces are popped off the reversed list; ``state`` tracks where we
        # are: 0 = plain text, 1 = just saw an opening tag, 2 = inside ruby.
        tokens = self._block_re.split(text)
        tokens.reverse()
        state = idx = 0
        try:
            while True:
                # text
                if state == 0:
                    val = tokens.pop()
                    yield idx, Other, val
                    idx += len(val)
                    state = 1
                # block starts
                elif state == 1:
                    tag = tokens.pop()
                    # literals
                    if tag in ('<%%', '%%>'):
                        yield idx, Other, tag
                        idx += 3
                        state = 0
                    # comment
                    elif tag == '<%#':
                        yield idx, Comment.Preproc, tag
                        val = tokens.pop()
                        yield idx + 3, Comment, val
                        idx += 3 + len(val)
                        state = 2
                    # blocks or output
                    elif tag in ('<%', '<%=', '<%-'):
                        yield idx, Comment.Preproc, tag
                        idx += len(tag)
                        data = tokens.pop()
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(data):
                            yield r_idx + idx, r_token, r_value
                        idx += len(data)
                        state = 2
                    elif tag in ('%>', '-%>'):
                        # closing tag with no matching opener
                        yield idx, Error, tag
                        idx += len(tag)
                        state = 0
                    # % raw ruby statements
                    else:
                        yield idx, Comment.Preproc, tag[0]
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
                            yield idx + 1 + r_idx, r_token, r_value
                        idx += len(tag)
                        state = 0
                # block ends
                elif state == 2:
                    tag = tokens.pop()
                    if tag not in ('%>', '-%>'):
                        yield idx, Other, tag
                    else:
                        yield idx, Comment.Preproc, tag
                    idx += len(tag)
                    state = 0
        except IndexError:
            # Ran out of pieces -- end of input.
            return

    def analyse_text(text):
        if '<%' in text and '%>' in text:
            return 0.4
142
+
143
+
144
class SmartyLexer(RegexLexer):
    """
    Generic `Smarty <http://smarty.php.net/>`_ template lexer.

    Just highlights smarty code between the preprocessor directives, other
    data is left untouched by the lexer.
    """

    name = 'Smarty'
    aliases = ['smarty']
    filenames = ['*.tpl']
    mimetypes = ['application/x-smarty']

    flags = re.MULTILINE | re.DOTALL

    tokens = {
        'root': [
            (r'[^{]+', Other),
            # {* ... *} comments
            (r'(\{)(\*.*?\*)(\})',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            # {php} ... {/php} blocks are delegated to the PHP lexer
            (r'(\{php\})(.*?)(\{/php\})',
             bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
                      Comment.Preproc)),
            # named {tag ...} / {/tag ...} constructs
            (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
             bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
            (r'\{', Comment.Preproc, 'smarty')
        ],
        # Inside a { ... } tag; nested braces push/pop this state.
        'smarty': [
            (r'\s+', Text),
            (r'\{', Comment.Preproc, '#push'),
            (r'\}', Comment.Preproc, '#pop'),
            (r'#[a-zA-Z_]\w*#', Name.Variable),
            (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
            (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
            (r'(true|false|null)\b', Keyword.Constant),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r'[a-zA-Z_]\w*', Name.Attribute)
        ]
    }

    def analyse_text(text):
        rv = 0.0
        # Each characteristic Smarty construct adds a little weight.
        if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
            rv += 0.15
        if re.search(r'\{include\s+file=.*?\}', text):
            rv += 0.15
        if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
            rv += 0.15
        if re.search(r'\{\$.*?\}', text):
            rv += 0.01
        return rv
198
+
199
+
200
class VelocityLexer(RegexLexer):
    """
    Generic `Velocity <http://velocity.apache.org/>`_ template lexer.

    Just highlights velocity directives and variable references, other
    data is left untouched by the lexer.
    """

    name = 'Velocity'
    aliases = ['velocity']
    filenames = ['*.vm', '*.fhtml']

    flags = re.MULTILINE | re.DOTALL

    # Shared identifier fragment used in several rules below.
    identifier = r'[a-zA-Z_]\w*'

    tokens = {
        'root': [
            (r'[^{#$]+', Other),
            # #* ... *# block comments
            (r'(#)(\*.*?\*)(#)',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            # ## line comments
            (r'(##)(.*?$)',
             bygroups(Comment.Preproc, Comment)),
            # directive with parameters, e.g. #if( ... ) or #{if}( ... )
            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
             'directiveparams'),
            # bare directive, e.g. #end or #{end}
            (r'(#\{?)(' + identifier + r')(\}|\b)',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
            # variable reference: $x, $!x, ${x}, $!{x}
            (r'\$!?\{?', Punctuation, 'variable')
        ],
        'variable': [
            (identifier, Name.Variable),
            (r'\(', Punctuation, 'funcparams'),
            # property/method chains re-enter this state
            (r'(\.)(' + identifier + r')',
             bygroups(Punctuation, Name.Variable), '#push'),
            (r'\}', Punctuation, '#pop'),
            default('#pop')
        ],
        'directiveparams': [
            (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
             Operator),
            (r'\[', Operator, 'rangeoperator'),
            (r'\b' + identifier + r'\b', Name.Function),
            include('funcparams')
        ],
        'rangeoperator': [
            (r'\.\.', Operator),
            include('funcparams'),
            (r'\]', Operator, '#pop')
        ],
        'funcparams': [
            (r'\$!?\{?', Punctuation, 'variable'),
            (r'\s+', Text),
            (r'[,:]', Punctuation),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r"\b[0-9]+\b", Number),
            (r'(true|false|null)\b', Keyword.Constant),
            # nested grouping just pushes/pops this state
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'\{', Punctuation, '#push'),
            (r'\}', Punctuation, '#pop'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
        ]
    }

    def analyse_text(text):
        rv = 0.0
        # macro / if / foreach blocks and variable references each add weight
        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
            rv += 0.25
        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
            rv += 0.15
        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
            rv += 0.15
        if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
                     r'(\.\w+(\([^)]*\))?)*\}?', text):
            rv += 0.01
        return rv
280
+
281
+
282
class VelocityHtmlLexer(DelegatingLexer):
    """
    `VelocityLexer` variant that runs everything the Velocity lexer
    leaves untouched through the `HtmlLexer`.
    """

    name = 'HTML+Velocity'
    mimetypes = ['text/html+velocity']
    aliases = ['html+velocity']
    alias_filenames = ['*.html', '*.fhtml']

    def __init__(self, **options):
        super().__init__(HtmlLexer, VelocityLexer, **options)
296
+
297
+
298
class VelocityXmlLexer(DelegatingLexer):
    """
    `VelocityLexer` variant that runs everything the Velocity lexer
    leaves untouched through the `XmlLexer`.
    """

    name = 'XML+Velocity'
    mimetypes = ['application/xml+velocity']
    aliases = ['xml+velocity']
    alias_filenames = ['*.xml', '*.vm']

    def __init__(self, **options):
        super().__init__(XmlLexer, VelocityLexer, **options)

    def analyse_text(text):
        # Start from the Velocity score, slightly discounted, and add a
        # bonus when the data looks like XML.
        score = VelocityLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        return score
318
+
319
+
320
class DjangoLexer(RegexLexer):
    """
    Generic `django <http://www.djangoproject.com/documentation/templates/>`_
    and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.

    It just highlights django/jinja code between the preprocessor directives,
    other data is left untouched by the lexer.
    """

    name = 'Django/Jinja'
    aliases = ['django', 'jinja']
    mimetypes = ['application/x-django-templating', 'application/x-jinja']

    flags = re.M | re.S

    tokens = {
        'root': [
            (r'[^{]+', Other),
            # {{ ... }} variable output
            (r'\{\{', Comment.Preproc, 'var'),
            # jinja/django comments
            (r'\{#.*?#\}', Comment),
            # django comments
            (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Comment, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # raw jinja blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Text, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'block'),
            # any other {% tag %} block
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'block'),
            (r'\{', Other)
        ],
        # Rules shared by 'var' and 'block': names, filters, literals.
        'varnames': [
            (r'(\|)(\s*)([a-zA-Z_]\w*)',
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
            (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
             r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
             Keyword),
            (r'(loop|block|super|forloop)\b', Name.Builtin),
            (r'[a-zA-Z_][\w-]*', Name.Variable),
            (r'\.\w+', Name.Variable),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            # optional whitespace-trim marker before the closing braces
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'block': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation)
        ]
    }

    def analyse_text(text):
        rv = 0.0
        # {% block %} / {% extends %} is the strongest signal.
        if re.search(r'\{%\s*(block|extends)', text) is not None:
            rv += 0.4
        if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
            rv += 0.1
        if re.search(r'\{\{.*?\}\}', text) is not None:
            rv += 0.1
        return rv
401
+
402
+
403
class MyghtyLexer(RegexLexer):
    """
    Generic `myghty templates`_ lexer. Code that isn't Myghty
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.6

    .. _myghty templates: http://www.myghty.org/
    """

    name = 'Myghty'
    aliases = ['myghty']
    filenames = ['*.myt', 'autodelegate']
    mimetypes = ['application/x-myghty']

    tokens = {
        'root': [
            (r'\s+', Text),
            # <%def>/<%method> blocks: body is re-lexed as Myghty itself
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # other <%word> ... </%word> blocks contain Python
            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PythonLexer), Name.Tag)),
            # component calls <& ... &>, with optional ,args
            (r'(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% ... %> / <%! ... %> substitutions contain Python
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
            # line-leading '#' comment lines
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            # line-leading '%' lines hold a single Python statement
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PythonLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(Other, Operator)),
        ]
    }
449
+
450
+
451
class MyghtyHtmlLexer(DelegatingLexer):
    """
    `MyghtyLexer` variant that highlights everything the Myghty lexer
    leaves untouched with the `HtmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'HTML+Myghty'
    mimetypes = ['text/html+myghty']
    aliases = ['html+myghty']

    def __init__(self, **options):
        super().__init__(HtmlLexer, MyghtyLexer, **options)
465
+
466
+
467
class MyghtyXmlLexer(DelegatingLexer):
    """
    `MyghtyLexer` variant that highlights everything the Myghty lexer
    leaves untouched with the `XmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'XML+Myghty'
    mimetypes = ['application/xml+myghty']
    aliases = ['xml+myghty']

    def __init__(self, **options):
        super().__init__(XmlLexer, MyghtyLexer, **options)
481
+
482
+
483
class MyghtyJavascriptLexer(DelegatingLexer):
    """
    `MyghtyLexer` variant that highlights everything the Myghty lexer
    leaves untouched with the `JavascriptLexer`.

    .. versionadded:: 0.6
    """

    name = 'JavaScript+Myghty'
    aliases = ['js+myghty', 'javascript+myghty']
    # NOTE(review): 'mygthy' below looks like a long-standing upstream typo,
    # but it is a registered mimetype string -- kept byte-for-byte.
    mimetypes = ['application/x-javascript+myghty',
                 'text/x-javascript+myghty',
                 'text/javascript+mygthy']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, MyghtyLexer, **options)
499
+
500
+
501
class MyghtyCssLexer(DelegatingLexer):
    """
    `MyghtyLexer` variant that highlights everything the Myghty lexer
    leaves untouched with the `CssLexer`.

    .. versionadded:: 0.6
    """

    name = 'CSS+Myghty'
    mimetypes = ['text/css+myghty']
    aliases = ['css+myghty']

    def __init__(self, **options):
        super().__init__(CssLexer, MyghtyLexer, **options)
515
+
516
+
517
class MasonLexer(RegexLexer):
    """
    Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
    Mason markup is HTML.

    .. _mason templates: http://www.masonhq.com/

    .. versionadded:: 1.4
    """
    name = 'Mason'
    aliases = ['mason']
    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
    mimetypes = ['application/x-mason']

    tokens = {
        'root': [
            (r'\s+', Text),
            # <%doc> blocks are pure documentation
            (r'(?s)(<%doc>)(.*?)(</%doc>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            # <%def>/<%method> blocks: body is re-lexed as Mason itself
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # other <%word> ... </%word> blocks contain Perl
            (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
            # component calls <& ... &>, with optional ,args
            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% ... %> / <%! ... %> substitutions contain Perl
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
            # line-leading '#' comment lines
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            # line-leading '%' lines hold a single Perl statement
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PerlLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(using(HtmlLexer), Operator)),
        ]
    }

    def analyse_text(text):
        result = 0.0
        # explicit Mason block close tags are conclusive
        if re.search(r'</%(class|doc|init)>', text) is not None:
            result = 1.0
        elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
            result = 0.11
        return result
571
+
572
+
573
class MakoLexer(RegexLexer):
    """
    Generic `mako templates`_ lexer. Code that isn't Mako
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.7

    .. _mako templates: http://www.makotemplates.org/
    """

    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']

    tokens = {
        'root': [
            # '% end...' control lines
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            # other '%' control lines contain Python
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            # '##' comment lines
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
            # opening <%tag ...>, attributes handled in the 'tag' state
            (r'(<%)([\w.:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
            # <% ... %> / <%! ... %> blocks contain Python
            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # ${...} substitutions contain Python
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            (r'((?:\w+)\s*=)(\s*)(".*?")',
             bygroups(Name.Attribute, Text, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
639
+
640
+
641
class MakoHtmlLexer(DelegatingLexer):
    """
    `MakoLexer` variant that highlights everything the Mako lexer
    leaves untouched with the `HtmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'HTML+Mako'
    mimetypes = ['text/html+mako']
    aliases = ['html+mako']

    def __init__(self, **options):
        super().__init__(HtmlLexer, MakoLexer, **options)
655
+
656
+
657
class MakoXmlLexer(DelegatingLexer):
    """
    `MakoLexer` variant that highlights everything the Mako lexer
    leaves untouched with the `XmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'XML+Mako'
    mimetypes = ['application/xml+mako']
    aliases = ['xml+mako']

    def __init__(self, **options):
        super().__init__(XmlLexer, MakoLexer, **options)
671
+
672
+
673
class MakoJavascriptLexer(DelegatingLexer):
    """
    `MakoLexer` variant that highlights everything the Mako lexer
    leaves untouched with the `JavascriptLexer`.

    .. versionadded:: 0.7
    """

    name = 'JavaScript+Mako'
    aliases = ['js+mako', 'javascript+mako']
    mimetypes = ['application/x-javascript+mako',
                 'text/x-javascript+mako',
                 'text/javascript+mako']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, MakoLexer, **options)
689
+
690
+
691
class MakoCssLexer(DelegatingLexer):
    """
    `MakoLexer` variant that highlights everything the Mako lexer
    leaves untouched with the `CssLexer`.

    .. versionadded:: 0.7
    """

    name = 'CSS+Mako'
    mimetypes = ['text/css+mako']
    aliases = ['css+mako']

    def __init__(self, **options):
        super().__init__(CssLexer, MakoLexer, **options)
705
+
706
+
707
+ # Genshi and Cheetah lexers courtesy of Matt Good.
708
+
709
class CheetahPythonLexer(Lexer):
    """
    Lexer for handling Cheetah's special $ tokens in Python syntax.
    """

    def get_tokens_unprocessed(self, text):
        # Delegate to the Python lexer, then re-tag the bare ``$`` signs
        # (which Python itself rejects as errors) as preprocessor tokens.
        delegate = PythonLexer(**self.options)
        for index, token, value in delegate.get_tokens_unprocessed(text):
            if token == Token.Error and value == '$':
                token = Comment.Preproc
            yield index, token, value
720
+
721
+
722
class CheetahLexer(RegexLexer):
    """
    Generic `cheetah templates`_ lexer. Code that isn't Cheetah
    markup is yielded as `Token.Other`. This also works for
    `spitfire templates`_ which use the same syntax.

    .. _cheetah templates: http://www.cheetahtemplate.org/
    .. _spitfire templates: http://code.google.com/p/spitfire/
    """

    name = 'Cheetah'
    aliases = ['cheetah', 'spitfire']
    filenames = ['*.tmpl', '*.spt']
    mimetypes = ['application/x-cheetah', 'application/x-spitfire']

    tokens = {
        'root': [
            # ## line comments
            (r'(##[^\n]*)$',
             (bygroups(Comment))),
            # #* ... *# block comments
            (r'#[*](.|\n)*?[*]#', Comment),
            (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
            (r'#slurp$', Comment.Preproc),
            # #directive lines contain Cheetah-flavoured Python
            (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
             (bygroups(Comment.Preproc, using(CheetahPythonLexer),
                       Comment.Preproc))),
            # TODO support other Python syntax like $foo['bar']
            (r'(\$)([a-zA-Z_][\w.]*\w)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
            # ${...} / ${!...} substitutions
            (r'(?s)(\$\{!?)(.*?)(\})',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            (r'''(?sx)
                (.+?)               # anything, followed by:
                (?:
                 (?=\#[#a-zA-Z]*) | # an eval comment
                 (?=\$[a-zA-Z_{]) | # a substitution
                 \Z                 # end of string
                )
            ''', Other),
            (r'\s+', Text),
        ],
    }
764
+
765
+
766
class CheetahHtmlLexer(DelegatingLexer):
    """
    `CheetahLexer` variant that highlights everything the Cheetah lexer
    leaves untouched with the `HtmlLexer`.
    """

    name = 'HTML+Cheetah'
    mimetypes = ['text/html+cheetah', 'text/html+spitfire']
    aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']

    def __init__(self, **options):
        super().__init__(HtmlLexer, CheetahLexer, **options)
778
+
779
+
780
class CheetahXmlLexer(DelegatingLexer):
    """
    `CheetahLexer` variant that highlights everything the Cheetah lexer
    leaves untouched with the `XmlLexer`.
    """

    name = 'XML+Cheetah'
    mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
    aliases = ['xml+cheetah', 'xml+spitfire']

    def __init__(self, **options):
        super().__init__(XmlLexer, CheetahLexer, **options)
792
+
793
+
794
class CheetahJavascriptLexer(DelegatingLexer):
    """
    `CheetahLexer` variant that highlights everything the Cheetah lexer
    leaves untouched with the `JavascriptLexer`.
    """

    name = 'JavaScript+Cheetah'
    aliases = ['js+cheetah', 'javascript+cheetah',
               'js+spitfire', 'javascript+spitfire']
    mimetypes = ['application/x-javascript+cheetah',
                 'text/x-javascript+cheetah',
                 'text/javascript+cheetah',
                 'application/x-javascript+spitfire',
                 'text/x-javascript+spitfire',
                 'text/javascript+spitfire']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, CheetahLexer, **options)
812
+
813
+
814
class GenshiTextLexer(RegexLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
    templates.
    """

    name = 'Genshi Text'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']

    tokens = {
        'root': [
            (r'[^#$\s]+', Other),
            # ## comment lines
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            # line-leading '#' starts a directive
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            (r'[#$\s]', Other),
        ],
        'directive': [
            (r'\n', Text, '#pop'),
            # def/for/if directives carry a Python expression
            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
            (r'(choose|when|with)([^\S\n]+)(.*)',
             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
            (r'(choose|otherwise)\b', Keyword, '#pop'),
            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
        ],
        'variable': [
            # ${...} expressions contain Python; (?<!\$) skips escaped '$$'
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # bare $name references
            (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
             Name.Variable),
        ]
    }
847
+
848
+
849
class GenshiMarkupLexer(RegexLexer):
    """
    Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
    `GenshiLexer`.
    """

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^<$]+', Other),
            # <?python ... ?> processing instructions contain Python
            (r'(<\?python)(.*?)(\?>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # yield style and script blocks as Other
            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
            # py:* directive tags get their own attribute handling
            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
            include('variable'),
            (r'[<$]', Other),
        ],
        'pytag': [
            (r'\s+', Text),
            (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        # Attribute values of py:* tags are Python expressions.
        'pyattr': [
            ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
            ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
        'tag': [
            (r'\s+', Text),
            # py:* attributes on ordinary tags also hold Python
            (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
            (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            ('"', String, 'attr-dstring'),
            ("'", String, 'attr-sstring'),
            (r'[^\s>]*', String, '#pop')
        ],
        'attr-dstring': [
            ('"', String, '#pop'),
            include('strings'),
            ("'", String)
        ],
        'attr-sstring': [
            ("'", String, '#pop'),
            include('strings'),
            ("'", String)
        ],
        'strings': [
            ('[^"\'$]+', String),
            include('variable')
        ],
        'variable': [
            # ${...} expressions contain Python; (?<!\$) skips escaped '$$'
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
             Name.Variable),
        ]
    }
+ }
911
+
912
+
913
class HtmlGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ kid HTML templates.
    """

    name = 'HTML+Genshi'
    aliases = ['html+genshi', 'html+kid']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+genshi']

    def __init__(self, **options):
        super().__init__(HtmlLexer, GenshiMarkupLexer, **options)

    def analyse_text(text):
        # Reward Genshi-specific markers on top of the plain-HTML score;
        # the -0.01 lets pure HTML win when no markers appear.
        score = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            score += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            score += 0.2
        return score + HtmlLexer.analyse_text(text) - 0.01
934
+
935
+
936
class GenshiLexer(DelegatingLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ kid XML templates.
    """

    name = 'Genshi'
    aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
    filenames = ['*.kid']
    alias_filenames = ['*.xml']
    mimetypes = ['application/x-genshi', 'application/x-kid']

    def __init__(self, **options):
        super().__init__(XmlLexer, GenshiMarkupLexer, **options)

    def analyse_text(text):
        # Same heuristic as HtmlGenshiLexer, but relative to plain XML.
        score = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            score += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            score += 0.2
        return score + XmlLexer.analyse_text(text) - 0.01
958
+
959
+
960
class JavascriptGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights javascript code in genshi text templates.
    """

    name = 'JavaScript+Genshi Text'
    aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
               'javascript+genshi']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+genshi',
                 'text/x-javascript+genshi',
                 'text/javascript+genshi']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, GenshiTextLexer, **options)

    def analyse_text(text):
        # Borrow the Genshi heuristic, penalised so the dedicated Genshi
        # lexers rank ahead of this JS variant.
        score = GenshiLexer.analyse_text(text)
        return score - 0.05
978
+
979
+
980
class CssGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights CSS definitions in genshi text templates.
    """

    name = 'CSS+Genshi Text'
    aliases = ['css+genshitext', 'css+genshi']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+genshi']

    def __init__(self, **options):
        super().__init__(CssLexer, GenshiTextLexer, **options)

    def analyse_text(text):
        # Borrow the Genshi heuristic, penalised so the dedicated Genshi
        # lexers rank ahead of this CSS variant.
        score = GenshiLexer.analyse_text(text)
        return score - 0.05
995
+
996
+
997
class RhtmlLexer(DelegatingLexer):
    """
    Subclass of the ERB lexer that highlights the unlexed data with the
    html lexer.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'RHTML'
    aliases = ['rhtml', 'html+erb', 'html+ruby']
    filenames = ['*.rhtml']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+ruby']

    def __init__(self, **options):
        super().__init__(HtmlLexer, ErbLexer, **options)

    def analyse_text(text):
        score = ErbLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            # one more than the XmlErbLexer returns
            score += 0.5
        return score
1020
+
1021
+
1022
class XmlErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights data outside preprocessor
    directives with the `XmlLexer`.
    """

    name = 'XML+Ruby'
    aliases = ['xml+erb', 'xml+ruby']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+ruby']

    def __init__(self, **options):
        super().__init__(XmlLexer, ErbLexer, **options)

    def analyse_text(text):
        # ERB score, boosted when the document also looks like XML.
        score = ErbLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        return score
1041
+
1042
+
1043
class CssErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
    """

    name = 'CSS+Ruby'
    aliases = ['css+erb', 'css+ruby']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+ruby']

    def __init__(self, **options):
        super().__init__(CssLexer, ErbLexer, **options)

    def analyse_text(text):
        # Slightly below plain ERB so this variant only wins via filename.
        score = ErbLexer.analyse_text(text)
        return score - 0.05
1058
+
1059
+
1060
class JavascriptErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Ruby'
    aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+ruby',
                 'text/x-javascript+ruby',
                 'text/javascript+ruby']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, ErbLexer, **options)

    def analyse_text(text):
        # Slightly below plain ERB so this variant only wins via filename.
        score = ErbLexer.analyse_text(text)
        return score - 0.05
1078
+
1079
+
1080
class HtmlPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'HTML+PHP'
    aliases = ['html+php']
    filenames = ['*.phtml']
    alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
                       '*.php[345]']
    mimetypes = ['application/x-php',
                 'application/x-httpd-php', 'application/x-httpd-php3',
                 'application/x-httpd-php4', 'application/x-httpd-php5']

    def __init__(self, **options):
        super().__init__(HtmlLexer, PhpLexer, **options)

    def analyse_text(text):
        # PHP score plus a strong bonus for an HTML doctype.
        score = PhpLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            score += 0.5
        return score
1104
+
1105
+
1106
class XmlPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
    """

    name = 'XML+PHP'
    aliases = ['xml+php']
    alias_filenames = ['*.xml', '*.php', '*.php[345]']
    mimetypes = ['application/xml+php']

    def __init__(self, **options):
        super().__init__(XmlLexer, PhpLexer, **options)

    def analyse_text(text):
        # PHP score plus a bonus when the document looks like XML.
        score = PhpLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        return score
1124
+
1125
+
1126
class CssPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
    """

    name = 'CSS+PHP'
    aliases = ['css+php']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+php']

    def __init__(self, **options):
        super().__init__(CssLexer, PhpLexer, **options)

    def analyse_text(text):
        # Slightly below plain PHP so this variant only wins via filename.
        score = PhpLexer.analyse_text(text)
        return score - 0.05
1141
+
1142
+
1143
class JavascriptPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` which highlights unmatched data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+PHP'
    aliases = ['js+php', 'javascript+php']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+php',
                 'text/x-javascript+php',
                 'text/javascript+php']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, PhpLexer, **options)

    def analyse_text(text):
        # NOTE(review): unlike the sibling Css/Erb variants there is no
        # -0.05 penalty here; this matches upstream, so it is preserved.
        return PhpLexer.analyse_text(text)
1161
+
1162
+
1163
class HtmlSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `HtmlLexer`.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'HTML+Smarty'
    aliases = ['html+smarty']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
    mimetypes = ['text/html+smarty']

    def __init__(self, **options):
        super().__init__(HtmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        # Smarty score plus a strong bonus for an HTML doctype.
        score = SmartyLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            score += 0.5
        return score
1184
+
1185
+
1186
class XmlSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Smarty'
    aliases = ['xml+smarty']
    alias_filenames = ['*.xml', '*.tpl']
    mimetypes = ['application/xml+smarty']

    def __init__(self, **options):
        super().__init__(XmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        # Smarty score plus a bonus when the document looks like XML.
        score = SmartyLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        return score
1205
+
1206
+
1207
class CssSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Smarty'
    aliases = ['css+smarty']
    alias_filenames = ['*.css', '*.tpl']
    mimetypes = ['text/css+smarty']

    def __init__(self, **options):
        super().__init__(CssLexer, SmartyLexer, **options)

    def analyse_text(text):
        # Slightly below plain Smarty so this variant only wins via filename.
        score = SmartyLexer.analyse_text(text)
        return score - 0.05
1223
+
1224
+
1225
class JavascriptSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Smarty'
    aliases = ['js+smarty', 'javascript+smarty']
    alias_filenames = ['*.js', '*.tpl']
    mimetypes = ['application/x-javascript+smarty',
                 'text/x-javascript+smarty',
                 'text/javascript+smarty']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, SmartyLexer, **options)

    def analyse_text(text):
        # Slightly below plain Smarty so this variant only wins via filename.
        score = SmartyLexer.analyse_text(text)
        return score - 0.05
1243
+
1244
+
1245
class HtmlDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `HtmlLexer`.

    Nested Javascript and CSS is highlighted too.
    """

    name = 'HTML+Django/Jinja'
    aliases = ['html+django', 'html+jinja', 'htmldjango']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+django', 'text/html+jinja']

    def __init__(self, **options):
        super().__init__(HtmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        # Django/Jinja score plus a strong bonus for an HTML doctype.
        score = DjangoLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            score += 0.5
        return score
1266
+
1267
+
1268
class XmlDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Django/Jinja'
    aliases = ['xml+django', 'xml+jinja']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+django', 'application/xml+jinja']

    def __init__(self, **options):
        super().__init__(XmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        # Django/Jinja score plus a bonus when the document looks like XML.
        score = DjangoLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        return score
1287
+
1288
+
1289
class CssDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Django/Jinja'
    aliases = ['css+django', 'css+jinja']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+django', 'text/css+jinja']

    def __init__(self, **options):
        super().__init__(CssLexer, DjangoLexer, **options)

    def analyse_text(text):
        # Slightly below plain Django so this variant only wins via filename.
        score = DjangoLexer.analyse_text(text)
        return score - 0.05
1305
+
1306
+
1307
class JavascriptDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Django/Jinja'
    aliases = ['js+django', 'javascript+django',
               'js+jinja', 'javascript+jinja']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+django',
                 'application/x-javascript+jinja',
                 'text/x-javascript+django',
                 'text/x-javascript+jinja',
                 'text/javascript+django',
                 'text/javascript+jinja']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, DjangoLexer, **options)

    def analyse_text(text):
        # Slightly below plain Django so this variant only wins via filename.
        score = DjangoLexer.analyse_text(text)
        return score - 0.05
1329
+
1330
+
1331
class JspRootLexer(RegexLexer):
    """
    Base for the `JspLexer`. Yields `Token.Other` for area outside of
    JSP tags.

    .. versionadded:: 0.7
    """

    tokens = {
        'root': [
            # '<%', optionally followed by one non-space char (e.g. '<%=')
            (r'<%\S?', Keyword, 'sec'),
            # FIXME: I want to make these keywords but still parse attributes.
            (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
             Keyword),
            # everything else is Other so a delegating lexer can rehighlight it
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
        ],
    }
1354
+
1355
+
1356
class JspLexer(DelegatingLexer):
    """
    Lexer for Java Server Pages.

    .. versionadded:: 0.7
    """
    name = 'Java Server Page'
    aliases = ['jsp']
    filenames = ['*.jsp']
    mimetypes = ['application/x-jsp']

    def __init__(self, **options):
        super().__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        # Java score plus bonuses for XML-ish structure and <% %> sections.
        score = JavaLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        if '<%' in text and '%>' in text:
            score += 0.1
        return score
1377
+
1378
+
1379
class EvoqueLexer(RegexLexer):
    """
    For files using the Evoque templating system.

    .. versionadded:: 1.1
    """
    name = 'Evoque'
    aliases = ['evoque']
    filenames = ['*.evoque']
    mimetypes = ['application/x-evoque']

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^#$]+', Other),
            (r'#\[', Comment.Multiline, 'comment'),
            # '$$' is an escaped dollar sign
            (r'\$\$', Other),
            # svn keywords
            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
            # directives: begin, end
            # group 4 captures an optional '%'; the conditional (?(4)%)
            # requires the matching '%' before the closing brace
            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            # nested #[ ... ]# comments
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }

    def analyse_text(text):
        """Evoque templates use $evoque, which is unique."""
        # Implicitly returns None (treated as 0) when the marker is absent.
        if '$evoque' in text:
            return 1
1435
+
1436
class EvoqueHtmlLexer(DelegatingLexer):
    """
    Subclass of the `EvoqueLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 1.1
    """
    name = 'HTML+Evoque'
    aliases = ['html+evoque']
    filenames = ['*.html']
    mimetypes = ['text/html+evoque']

    def __init__(self, **options):
        super().__init__(HtmlLexer, EvoqueLexer, **options)

    def analyse_text(text):
        # The '$evoque' marker check is decisive either way.
        return EvoqueLexer.analyse_text(text)
1453
+
1454
+
1455
class EvoqueXmlLexer(DelegatingLexer):
    """
    Subclass of the `EvoqueLexer` that highlights unlexed data with the
    `XmlLexer`.

    .. versionadded:: 1.1
    """
    name = 'XML+Evoque'
    aliases = ['xml+evoque']
    filenames = ['*.xml']
    mimetypes = ['application/xml+evoque']

    def __init__(self, **options):
        super().__init__(XmlLexer, EvoqueLexer, **options)

    def analyse_text(text):
        # The '$evoque' marker check is decisive either way.
        return EvoqueLexer.analyse_text(text)
1472
+
1473
+
1474
class ColdfusionLexer(RegexLexer):
    """
    Coldfusion statements (CFScript and tag-attribute expressions).
    """
    name = 'cfstatement'
    aliases = ['cfs']
    filenames = []
    mimetypes = []
    # CFML keywords/operators are case-insensitive
    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'//.*?\n', Comment.Single),
            (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
            (r'\+\+|--', Operator),
            (r'[-+*/^&=!]', Operator),
            (r'<=|>=|<|>|==', Operator),
            (r'mod\b', Operator),
            # CFML word operators
            (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
            (r'\|\||&&', Operator),
            (r'\?', Operator),
            (r'"', String.Double, 'string'),
            # There is a special rule for allowing html in single quoted
            # strings, evidently.
            (r"'.*?'", String.Single),
            (r'\d+', Number),
            (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
             r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
             r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            # built-in scopes
            (r'(application|session|client|cookie|super|this|variables|arguments)\b',
             Name.Constant),
            # name followed by '(' -> function call
            (r'([a-z_$][\w.]*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[a-z_$][\w.]*', Name.Variable),
            (r'[()\[\]{};:,.\\]', Punctuation),
            (r'\s+', Text),
        ],
        'string': [
            # doubled quote is an escaped quote inside a string
            (r'""', String.Double),
            # #expr# interpolation
            (r'#.+?#', String.Interp),
            (r'[^"#]+', String.Double),
            (r'#', String.Double),
            (r'"', String.Double, '#pop'),
        ],
    }
1520
+
1521
+
1522
class ColdfusionMarkupLexer(RegexLexer):
    """
    Coldfusion markup only (cf tags); script/expressions are delegated
    to `ColdfusionLexer`.
    """
    name = 'Coldfusion'
    aliases = ['cf']
    filenames = []
    mimetypes = []

    tokens = {
        'root': [
            (r'[^<]+', Other),
            include('tags'),
            (r'<[^<>]*', Other),
        ],
        'tags': [
            # CFML comment <!--- ... ---> (may nest, see 'cfcomment')
            (r'<!---', Comment.Multiline, 'cfcomment'),
            (r'(?s)<!--.*?-->', Comment),
            (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
            (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
            # negative lookbehind is for strings with embedded >
            (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
             r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
             r'mailpart|mail|header|content|zip|image|lock|argument|try|'
             r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
        ],
        'cfoutput': [
            (r'[^#<]+', Other),
            # #expr# interpolation inside cfoutput
            (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
                                      Punctuation)),
            # (r'<cfoutput.*?>', Name.Builtin, '#push'),
            (r'</cfoutput.*?>', Name.Builtin, '#pop'),
            include('tags'),
            (r'(?s)<[^<>]*', Other),
            (r'#', Other),
        ],
        'cfcomment': [
            # nested comments push/pop this state
            (r'<!---', Comment.Multiline, '#push'),
            (r'--->', Comment.Multiline, '#pop'),
            (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
        ],
    }
1566
+
1567
+
1568
class ColdfusionHtmlLexer(DelegatingLexer):
    """
    Coldfusion markup in html
    """
    name = 'Coldfusion HTML'
    aliases = ['cfm']
    filenames = ['*.cfm', '*.cfml']
    mimetypes = ['application/x-coldfusion']

    def __init__(self, **options):
        # cf tags are lexed first; leftover markup goes to the HTML lexer
        super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
1579
+
1580
+
1581
class ColdfusionCFCLexer(DelegatingLexer):
    """
    Coldfusion markup/script components

    .. versionadded:: 2.0
    """
    name = 'Coldfusion CFC'
    aliases = ['cfc']
    filenames = ['*.cfc']
    mimetypes = []

    def __init__(self, **options):
        # script-style components delegate leftover text to the HTML variant
        super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
1594
+
1595
+
1596
class SspLexer(DelegatingLexer):
    """
    Lexer for Scalate Server Pages.

    .. versionadded:: 1.4
    """
    name = 'Scalate Server Page'
    aliases = ['ssp']
    filenames = ['*.ssp']
    mimetypes = ['application/x-ssp']

    def __init__(self, **options):
        super().__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        # Scala-style 'val x:' declarations are the strongest signal,
        # then XML structure and <% %> sections.
        score = 0.0
        if re.search(r'val \w+\s*:', text):
            score += 0.6
        if looks_like_xml(text):
            score += 0.2
        if '<%' in text and '%>' in text:
            score += 0.1
        return score
1619
+
1620
+
1621
class TeaTemplateRootLexer(RegexLexer):
    """
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
    code blocks.

    .. versionadded:: 1.5
    """

    tokens = {
        'root': [
            # '<%', optionally followed by one non-space char
            (r'<%\S?', Keyword, 'sec'),
            # everything else is Other so a delegating lexer can rehighlight it
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
        ],
    }
1641
+
1642
+
1643
class TeaTemplateLexer(DelegatingLexer):
    """
    Lexer for `Tea Templates <http://teatrove.org/>`_.

    .. versionadded:: 1.5
    """
    name = 'Tea'
    aliases = ['tea']
    filenames = ['*.tea']
    mimetypes = ['text/x-tea']

    def __init__(self, **options):
        super().__init__(XmlLexer, TeaTemplateRootLexer, **options)

    def analyse_text(text):
        # Tea-language score plus bonuses for XML structure and <% %> blocks.
        score = TeaLangLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        if '<%' in text and '%>' in text:
            score += 0.1
        return score
1664
+
1665
+
1666
class LassoHtmlLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `HtmlLexer`.

    Nested JavaScript and CSS is also highlighted.

    .. versionadded:: 1.6
    """

    name = 'HTML+Lasso'
    aliases = ['html+lasso']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['text/html+lasso',
                 'application/x-httpd-lasso',
                 'application/x-httpd-lasso[89]']

    def __init__(self, **options):
        super().__init__(HtmlLexer, LassoLexer, **options)

    def analyse_text(text):
        score = LassoLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):  # same as HTML lexer
            score += 0.5
        return score
1692
+
1693
+
1694
class LassoXmlLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `XmlLexer`.

    .. versionadded:: 1.6
    """

    name = 'XML+Lasso'
    aliases = ['xml+lasso']
    alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['application/xml+lasso']

    def __init__(self, **options):
        super().__init__(XmlLexer, LassoLexer, **options)

    def analyse_text(text):
        # Lasso score plus a bonus when the document looks like XML.
        score = LassoLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            score += 0.4
        return score
1716
+
1717
+
1718
class LassoCssLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `CssLexer`.

    .. versionadded:: 1.6
    """

    name = 'CSS+Lasso'
    aliases = ['css+lasso']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+lasso']

    def __init__(self, **options):
        # Lasso code must be inside delimiters; the rest is CSS.
        options['requiredelimiters'] = True
        super().__init__(CssLexer, LassoLexer, **options)

    def analyse_text(text):
        score = LassoLexer.analyse_text(text) - 0.05
        if re.search(r'\w+:[^;]+;', text):  # CSS property: value;
            score += 0.1
        if 'padding:' in text:
            score += 0.1
        return score
1742
+
1743
+
1744
class LassoJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `JavascriptLexer`.

    .. versionadded:: 1.6
    """

    name = 'JavaScript+Lasso'
    aliases = ['js+lasso', 'javascript+lasso']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+lasso',
                 'text/x-javascript+lasso',
                 'text/javascript+lasso']

    def __init__(self, **options):
        # Lasso code must be inside delimiters; the rest is JavaScript.
        options['requiredelimiters'] = True
        super().__init__(JavascriptLexer, LassoLexer, **options)

    def analyse_text(text):
        # Slightly below plain Lasso so this variant only wins via filename.
        return LassoLexer.analyse_text(text) - 0.05
1766
+
1767
+
1768
class HandlebarsLexer(RegexLexer):
    """
    Generic `handlebars <http://handlebarsjs.com/>` template lexer.

    Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.0
    """

    name = "Handlebars"
    aliases = ['handlebars']

    tokens = {
        'root': [
            (r'[^{]+', Other),

            # Comment start {{! }} or {{!--
            # NOTE(review): this only matches up to the first '}}', so
            # '{{!-- ... --}}' comments containing '}}' are cut short —
            # confirm against upstream before changing.
            (r'\{\{!.*\}\}', Comment),

            # HTML Escaping open {{{expression
            (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),

            # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
            (r'(\{\{)([#~/]+)([^\s}]*)',
             bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
        ],

        'tag': [
            (r'\s+', Text),
            # HTML Escaping close }}}
            (r'\}\}\}', Comment.Special, '#pop'),
            # blockClose}}, includes optional tilde ~
            (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),

            # {{opt=something}}
            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),

            # Partials {{> ...}}
            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
             'dynamic-partial'),

            include('generic'),
        ],
        'dynamic-partial': [
            (r'\s+', Text),
            (r'\)', Punctuation, '#pop'),

            # (lookup . "key") / (lookup this "key")
            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
                                                      Name.Variable, Text)),
            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
                                             using(this, state='variable'))),
            (r'[\w-]+', Name.Function),

            include('generic'),
        ],
        'variable': [
            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
            (r'\.[\w-]+', Name.Variable),
            # path references: this/, ./, ../
            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
        ],
        'generic': [
            include('variable'),

            # borrowed from DjangoLexer
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            # NOTE(review): '(eE[+-][0-9])?' matches the literal chars 'eE',
            # not an exponent — inherited from DjangoLexer; confirm upstream.
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ]
    }
1842
+
1843
+
1844
class HandlebarsHtmlLexer(DelegatingLexer):
    """
    Subclass of the `HandlebarsLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Handlebars"
    aliases = ["html+handlebars"]
    filenames = ['*.handlebars', '*.hbs']
    mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']

    def __init__(self, **options):
        # Handlebars tags are lexed first; leftover markup goes to HTML.
        super().__init__(HtmlLexer, HandlebarsLexer, **options)
1859
+
1860
+
1861
class YamlJinjaLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `YamlLexer`.

    Commonly used in Saltstack salt states.

    .. versionadded:: 2.0
    """

    name = 'YAML+Jinja'
    aliases = ['yaml+jinja', 'salt', 'sls']
    filenames = ['*.sls']
    mimetypes = ['text/x-yaml+jinja', 'text/x-sls']

    def __init__(self, **options):
        # Jinja tags are lexed first; leftover text goes to the YAML lexer.
        super().__init__(YamlLexer, DjangoLexer, **options)
1878
+
1879
+
1880
class LiquidLexer(RegexLexer):
    """
    Lexer for `Liquid templates
    <http://www.rubydoc.info/github/Shopify/liquid>`_.

    Tokenizes plain text interleaved with Liquid ``{% ... %}`` tag/block
    markup and ``{{ ... }}`` output markup.  Rule order within each state
    is significant: more specific patterns must precede the generic
    fallbacks.

    .. versionadded:: 2.0
    """
    name = 'liquid'
    aliases = ['liquid']
    filenames = ['*.liquid']

    tokens = {
        'root': [
            # everything up to the next '{' is plain text
            (r'[^{]+', Text),
            # tags and block tags
            (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
            # output tags
            (r'(\{\{)(\s*)([^\s}]+)',
             bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
             'output'),
            # a lone '{' that did not open markup
            (r'\{', Text)
        ],

        'tag-or-block': [
            # builtin logic blocks
            (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
            (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
             combined('end-of-block', 'whitespace', 'generic')),
            (r'(else)(\s*)(%\})',
             bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),

            # other builtin blocks
            (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation), '#pop'),
            (r'(comment)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
            (r'(raw)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),

            # end of block; the inner group of 'end(case|unless|if)' gets no
            # token of its own (None) -- it is part of the whole keyword
            (r'(end(case|unless|if))(\s*)(%\})',
             bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
            (r'(end([^\s%]+))(\s*)(%\})',
             bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),

            # builtin tags (assign and include are handled together with usual tags)
            (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
             bygroups(Name.Tag, Whitespace,
                      using(this, state='generic'), Punctuation, Whitespace),
             'variable-tag-markup'),

            # other tags or blocks
            (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
        ],

        'output': [
            include('whitespace'),
            (r'\}\}', Punctuation, '#pop'),  # end of output

            (r'\|', Punctuation, 'filters')
        ],

        'filters': [
            include('whitespace'),
            (r'\}\}', Punctuation, ('#pop', '#pop')),  # end of filters and output

            (r'([^\s|:]+)(:?)(\s*)',
             bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
        ],

        'filter-markup': [
            (r'\|', Punctuation, '#pop'),
            include('end-of-tag'),
            include('default-param-markup')
        ],

        'condition': [
            include('end-of-block'),
            include('whitespace'),

            # binary comparison: lhs OP rhs %}
            (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
             bygroups(using(this, state = 'generic'), Whitespace, Operator,
                      Whitespace, using(this, state = 'generic'), Whitespace,
                      Punctuation)),
            (r'\b!', Operator),
            (r'\bnot\b', Operator.Word),
            (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
             bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
                      Whitespace, using(this, state = 'generic'))),

            include('generic'),
            include('whitespace')
        ],

        'generic-value': [
            include('generic'),
            include('end-at-whitespace')
        ],

        'operator': [
            (r'(\s*)((=|!|>|<)=?)(\s*)',
             bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
            (r'(\s*)(\bcontains\b)(\s*)',
             bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
        ],

        'end-of-tag': [
            (r'\}\}', Punctuation, '#pop')
        ],

        'end-of-block': [
            # pops twice: out of the block state and its parent
            (r'%\}', Punctuation, ('#pop', '#pop'))
        ],

        'end-at-whitespace': [
            (r'\s+', Whitespace, '#pop')
        ],

        # states for unknown markup
        'param-markup': [
            include('whitespace'),
            # params with colons or equals
            (r'([^\s=:]+)(\s*)(=|:)',
             bygroups(Name.Attribute, Whitespace, Operator)),
            # explicit variables
            # NOTE(review): '[^\s}]' matches a single character only, so
            # multi-character variable names inside '{{ ... }}' here are not
            # fully captured -- looks like it should be '[^\s}]+'; confirm
            # against upstream before changing.
            (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
             bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation)),

            include('string'),
            include('number'),
            include('keyword'),
            (r',', Punctuation)
        ],

        'default-param-markup': [
            include('param-markup'),
            (r'.', Text)  # fallback for switches / variables / un-quoted strings / ...
        ],

        'variable-param-markup': [
            include('param-markup'),
            include('variable'),
            (r'.', Text)  # fallback
        ],

        'tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('default-param-markup')
        ],

        'variable-tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('variable-param-markup')
        ],

        # states for different values types
        'keyword': [
            (r'\b(false|true)\b', Keyword.Constant)
        ],

        'variable': [
            (r'[a-zA-Z_]\w*', Name.Variable),
            (r'(?<=\w)\.(?=\w)', Punctuation)
        ],

        'string': [
            (r"'[^']*'", String.Single),
            (r'"[^"]*"', String.Double)
        ],

        'number': [
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer)
        ],

        'generic': [  # decides for variable, string, keyword or number
            include('keyword'),
            include('string'),
            include('number'),
            include('variable')
        ],

        'whitespace': [
            (r'[ \t]+', Whitespace)
        ],

        # states for builtin blocks
        'comment': [
            (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), ('#pop', '#pop')),
            (r'.', Comment)
        ],

        'raw': [
            (r'[^{]+', Text),
            (r'(\{%)(\s*)(endraw)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), '#pop'),
            (r'\{', Text)
        ],
    }
2084
+
2085
+
2086
class TwigLexer(RegexLexer):
    """
    `Twig <http://twig.sensiolabs.org/>`_ template lexer.

    It just highlights Twig code between the preprocessor directives,
    other data is left untouched by the lexer.

    .. versionadded:: 2.0
    """

    name = 'Twig'
    aliases = ['twig']
    mimetypes = ['application/x-twig']

    flags = re.M | re.S

    # Note that a backslash is included in the following two patterns
    # PHP uses a backslash as a namespace separator
    _ident_char = r'[\\\w-]|[^\x00-\x7f]'
    _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
    _ident_end = r'(?:' + _ident_char + ')*'
    _ident_inner = _ident_begin + _ident_end

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # twig comments
            (r'\{\#.*?\#\}', Comment),
            # raw twig blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks ('%%' is a literal '%' under %-formatting)
            (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'tag'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'tag'),
            (r'\{', Other),
        ],
        'varnames': [
            # filter application: '| filtername'
            (r'(\|)(\s*)(%s)' % _ident_inner,
             bygroups(Operator, Text, Name.Function)),
            # tests: 'is [not] testname'
            (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
            # FIX: the original concatenated these raw-string fragments
            # without a '|' between them, producing bogus alternatives such
            # as 'isif', 'importconstant' and 'sameasmatches' while silently
            # dropping 'if', 'import', 'constant' and 'matches' as keywords.
            (r'(in|not|and|b-and|or|b-or|b-xor|is'
             r'|if|elseif|else|import'
             r'|constant|defined|divisibleby|empty|even|iterable|odd|sameas'
             r'|matches|starts\s+with|ends\s+with)\b',
             Keyword),
            (r'(loop|block|parent)\b', Name.Builtin),
            (_ident_inner, Name.Variable),
            (r'\.' + _ident_inner, Name.Variable),
            (r'\.[0-9]+', Number),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
            # FIX: the exponent part used to be '(eE[+-][0-9])?', which only
            # matched the literal text 'eE' followed by a sign and one digit;
            # use a proper character class with an optional sign and digits.
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'tag': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation),
        ],
    }
2167
+
2168
+
2169
class TwigHtmlLexer(DelegatingLexer):
    """
    Subclass of the `TwigLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Twig"
    aliases = ["html+twig"]
    filenames = ['*.twig']
    mimetypes = ['text/html+twig']

    def __init__(self, **options):
        # TwigLexer handles the template directives; whatever it leaves as
        # `Other` tokens is re-lexed as HTML by the root HtmlLexer.
        super().__init__(HtmlLexer, TwigLexer, **options)
2184
+
2185
+
2186
class Angular2Lexer(RegexLexer):
    """
    Generic
    `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
    template lexer.

    Highlights only the Angular template tags (stuff between `{{` and `}}` and
    special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.1
    """

    name = "Angular2"
    aliases = ['ng2']

    tokens = {
        'root': [
            # anything that cannot start Angular markup is passed through
            (r'[^{([*#]+', Other),

            # {{meal.name}}
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),

            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
             'attr'),
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),

            # *ngIf="..."; #f="ngForm"
            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
            (r'([*#])([\w:.-]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text)),
        ],

        'ngExpression': [
            (r'\s+(\|\s+)?', Text),
            (r'\}\}', Comment.Preproc, '#pop'),

            # Literals
            # NOTE(review): String.Boolean is a non-standard token type; it is
            # auto-created by pygments' token machinery, so it works, but
            # Keyword.Constant would be the conventional choice -- confirm.
            (r':?(true|false)', String.Boolean),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            # NOTE(review): '(eE[+-][0-9])?' matches the literal text 'eE',
            # not an exponent marker -- presumably meant '([eE][+-]?[0-9]+)?';
            # left unchanged here, verify against upstream.
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),

            # Variabletext
            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
            (r'\.[\w-]+(\(.*\))?', Name.Variable),

            # inline If
            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
             bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
        ],
        'attr': [
            # attribute value: quoted string or a bare token, then pop back
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
2248
+
2249
+
2250
class Angular2HtmlLexer(DelegatingLexer):
    """
    Subclass of the `Angular2Lexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.1
    """
    # FIX: the docstring previously claimed '.. versionadded:: 2.0', but this
    # lexer delegates to Angular2Lexer, which is documented as added in 2.1;
    # both shipped together.

    name = "HTML + Angular2"
    aliases = ["html+ng2"]
    filenames = ['*.ng2']

    def __init__(self, **options):
        # Angular2Lexer handles the template syntax; anything it leaves as
        # `Other` tokens is re-lexed as plain HTML by HtmlLexer.
        super().__init__(HtmlLexer, Angular2Lexer, **options)