pygments.rb 1.2.1 → 2.0.0.rc2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (378)
  1. checksums.yaml +5 -5
  2. data/.github/dependabot.yml +13 -0
  3. data/.github/workflows/ci.yml +28 -0
  4. data/.github/workflows/release.yml +24 -0
  5. data/.gitignore +5 -6
  6. data/CHANGELOG.adoc +119 -0
  7. data/Gemfile +3 -1
  8. data/LICENSE +1 -1
  9. data/README.adoc +161 -0
  10. data/Rakefile +10 -21
  11. data/bench.rb +8 -7
  12. data/cache-lexers.rb +3 -2
  13. data/lib/pygments.rb +10 -11
  14. data/lib/pygments/lexer.rb +5 -5
  15. data/lib/pygments/mentos.py +23 -66
  16. data/lib/pygments/popen.rb +152 -204
  17. data/lib/pygments/version.rb +2 -2
  18. data/pygments.rb.gemspec +11 -9
  19. data/test/test_pygments.rb +51 -84
  20. data/vendor/pygments-main/{AUTHORS → Pygments-2.7.3.dist-info/AUTHORS} +21 -3
  21. data/vendor/pygments-main/Pygments-2.7.3.dist-info/INSTALLER +1 -0
  22. data/vendor/pygments-main/{LICENSE → Pygments-2.7.3.dist-info/LICENSE} +1 -1
  23. data/vendor/pygments-main/Pygments-2.7.3.dist-info/METADATA +49 -0
  24. data/vendor/pygments-main/Pygments-2.7.3.dist-info/RECORD +482 -0
  25. data/vendor/pygments-main/Pygments-2.7.3.dist-info/REQUESTED +0 -0
  26. data/vendor/pygments-main/Pygments-2.7.3.dist-info/WHEEL +5 -0
  27. data/vendor/pygments-main/Pygments-2.7.3.dist-info/entry_points.txt +3 -0
  28. data/vendor/pygments-main/Pygments-2.7.3.dist-info/top_level.txt +1 -0
  29. data/vendor/pygments-main/bin/pygmentize +8 -0
  30. data/vendor/pygments-main/pygments/__init__.py +6 -11
  31. data/vendor/pygments-main/pygments/__main__.py +18 -0
  32. data/vendor/pygments-main/pygments/cmdline.py +38 -29
  33. data/vendor/pygments-main/pygments/console.py +6 -9
  34. data/vendor/pygments-main/pygments/filter.py +4 -6
  35. data/vendor/pygments-main/pygments/filters/__init__.py +609 -21
  36. data/vendor/pygments-main/pygments/formatter.py +4 -4
  37. data/vendor/pygments-main/pygments/formatters/__init__.py +9 -8
  38. data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -3
  39. data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -1
  40. data/vendor/pygments-main/pygments/formatters/html.py +223 -135
  41. data/vendor/pygments-main/pygments/formatters/img.py +68 -41
  42. data/vendor/pygments-main/pygments/formatters/irc.py +39 -39
  43. data/vendor/pygments-main/pygments/formatters/latex.py +56 -26
  44. data/vendor/pygments-main/pygments/formatters/other.py +12 -8
  45. data/vendor/pygments-main/pygments/formatters/rtf.py +29 -29
  46. data/vendor/pygments-main/pygments/formatters/svg.py +38 -4
  47. data/vendor/pygments-main/pygments/formatters/terminal.py +25 -31
  48. data/vendor/pygments-main/pygments/formatters/terminal256.py +22 -12
  49. data/vendor/pygments-main/pygments/lexer.py +41 -39
  50. data/vendor/pygments-main/pygments/lexers/__init__.py +342 -0
  51. data/vendor/pygments-main/pygments/lexers/_asy_builtins.py +1645 -0
  52. data/vendor/pygments-main/pygments/lexers/_cl_builtins.py +232 -0
  53. data/vendor/pygments-main/pygments/lexers/_cocoa_builtins.py +71 -0
  54. data/vendor/pygments-main/pygments/lexers/_csound_builtins.py +1725 -0
  55. data/vendor/pygments-main/pygments/lexers/_lasso_builtins.py +5327 -0
  56. data/vendor/pygments-main/pygments/lexers/_lua_builtins.py +293 -0
  57. data/vendor/pygments-main/pygments/lexers/_mapping.py +551 -0
  58. data/vendor/pygments-main/pygments/lexers/_mql_builtins.py +1172 -0
  59. data/vendor/pygments-main/pygments/lexers/_mysql_builtins.py +1282 -0
  60. data/vendor/pygments-main/pygments/lexers/_openedge_builtins.py +2547 -0
  61. data/vendor/pygments-main/pygments/lexers/_php_builtins.py +4753 -0
  62. data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +678 -0
  63. data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +3094 -0
  64. data/vendor/pygments-main/pygments/lexers/_sourcemod_builtins.py +1161 -0
  65. data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +558 -0
  66. data/vendor/pygments-main/pygments/lexers/_stata_builtins.py +421 -0
  67. data/vendor/pygments-main/pygments/lexers/_tsql_builtins.py +1004 -0
  68. data/vendor/pygments-main/pygments/lexers/_usd_builtins.py +113 -0
  69. data/vendor/pygments-main/pygments/lexers/_vbscript_builtins.py +280 -0
  70. data/vendor/pygments-main/pygments/lexers/_vim_builtins.py +1939 -0
  71. data/vendor/pygments-main/pygments/lexers/actionscript.py +245 -0
  72. data/vendor/pygments-main/pygments/lexers/agile.py +24 -0
  73. data/vendor/pygments-main/pygments/lexers/algebra.py +240 -0
  74. data/vendor/pygments-main/pygments/lexers/ambient.py +76 -0
  75. data/vendor/pygments-main/pygments/lexers/ampl.py +87 -0
  76. data/vendor/pygments-main/pygments/lexers/apl.py +101 -0
  77. data/vendor/pygments-main/pygments/lexers/archetype.py +318 -0
  78. data/vendor/pygments-main/pygments/lexers/arrow.py +117 -0
  79. data/vendor/pygments-main/pygments/lexers/asm.py +1005 -0
  80. data/vendor/pygments-main/pygments/lexers/automation.py +374 -0
  81. data/vendor/pygments-main/pygments/lexers/bare.py +104 -0
  82. data/vendor/pygments-main/pygments/lexers/basic.py +662 -0
  83. data/vendor/pygments-main/pygments/lexers/bibtex.py +160 -0
  84. data/vendor/pygments-main/pygments/lexers/boa.py +102 -0
  85. data/vendor/pygments-main/pygments/lexers/business.py +627 -0
  86. data/vendor/pygments-main/pygments/lexers/c_cpp.py +344 -0
  87. data/vendor/pygments-main/pygments/lexers/c_like.py +566 -0
  88. data/vendor/pygments-main/pygments/lexers/capnproto.py +78 -0
  89. data/vendor/pygments-main/pygments/lexers/chapel.py +112 -0
  90. data/vendor/pygments-main/pygments/lexers/clean.py +179 -0
  91. data/vendor/pygments-main/pygments/lexers/compiled.py +34 -0
  92. data/vendor/pygments-main/pygments/lexers/configs.py +984 -0
  93. data/vendor/pygments-main/pygments/lexers/console.py +114 -0
  94. data/vendor/pygments-main/pygments/lexers/crystal.py +392 -0
  95. data/vendor/pygments-main/pygments/lexers/csound.py +467 -0
  96. data/vendor/pygments-main/pygments/lexers/css.py +691 -0
  97. data/vendor/pygments-main/pygments/lexers/d.py +256 -0
  98. data/vendor/pygments-main/pygments/lexers/dalvik.py +125 -0
  99. data/vendor/pygments-main/pygments/lexers/data.py +698 -0
  100. data/vendor/pygments-main/pygments/lexers/devicetree.py +109 -0
  101. data/vendor/pygments-main/pygments/lexers/diff.py +165 -0
  102. data/vendor/pygments-main/pygments/lexers/dotnet.py +707 -0
  103. data/vendor/pygments-main/pygments/lexers/dsls.py +960 -0
  104. data/vendor/pygments-main/pygments/lexers/dylan.py +287 -0
  105. data/vendor/pygments-main/pygments/lexers/ecl.py +139 -0
  106. data/vendor/pygments-main/pygments/lexers/eiffel.py +65 -0
  107. data/vendor/pygments-main/pygments/lexers/elm.py +121 -0
  108. data/vendor/pygments-main/pygments/lexers/email.py +151 -0
  109. data/vendor/pygments-main/pygments/lexers/erlang.py +530 -0
  110. data/vendor/pygments-main/pygments/lexers/esoteric.py +304 -0
  111. data/vendor/pygments-main/pygments/lexers/ezhil.py +77 -0
  112. data/vendor/pygments-main/pygments/lexers/factor.py +344 -0
  113. data/vendor/pygments-main/pygments/lexers/fantom.py +250 -0
  114. data/vendor/pygments-main/pygments/lexers/felix.py +273 -0
  115. data/vendor/pygments-main/pygments/lexers/floscript.py +83 -0
  116. data/vendor/pygments-main/pygments/lexers/forth.py +178 -0
  117. data/vendor/pygments-main/pygments/lexers/fortran.py +206 -0
  118. data/vendor/pygments-main/pygments/lexers/foxpro.py +428 -0
  119. data/vendor/pygments-main/pygments/lexers/freefem.py +898 -0
  120. data/vendor/pygments-main/pygments/lexers/functional.py +21 -0
  121. data/vendor/pygments-main/pygments/lexers/gdscript.py +346 -0
  122. data/vendor/pygments-main/pygments/lexers/go.py +101 -0
  123. data/vendor/pygments-main/pygments/lexers/grammar_notation.py +270 -0
  124. data/vendor/pygments-main/pygments/lexers/graph.py +85 -0
  125. data/vendor/pygments-main/pygments/lexers/graphics.py +800 -0
  126. data/vendor/pygments-main/pygments/lexers/haskell.py +870 -0
  127. data/vendor/pygments-main/pygments/lexers/haxe.py +936 -0
  128. data/vendor/pygments-main/pygments/lexers/hdl.py +472 -0
  129. data/vendor/pygments-main/pygments/lexers/hexdump.py +103 -0
  130. data/vendor/pygments-main/pygments/lexers/html.py +614 -0
  131. data/vendor/pygments-main/pygments/lexers/idl.py +281 -0
  132. data/vendor/pygments-main/pygments/lexers/igor.py +420 -0
  133. data/vendor/pygments-main/pygments/lexers/inferno.py +96 -0
  134. data/vendor/pygments-main/pygments/lexers/installers.py +322 -0
  135. data/vendor/pygments-main/pygments/lexers/int_fiction.py +1368 -0
  136. data/vendor/pygments-main/pygments/lexers/iolang.py +63 -0
  137. data/vendor/pygments-main/pygments/lexers/j.py +146 -0
  138. data/vendor/pygments-main/pygments/lexers/javascript.py +1540 -0
  139. data/vendor/pygments-main/pygments/lexers/julia.py +331 -0
  140. data/vendor/pygments-main/pygments/lexers/jvm.py +1673 -0
  141. data/vendor/pygments-main/pygments/lexers/lisp.py +2699 -0
  142. data/vendor/pygments-main/pygments/lexers/make.py +206 -0
  143. data/vendor/pygments-main/pygments/lexers/markup.py +765 -0
  144. data/vendor/pygments-main/pygments/lexers/math.py +21 -0
  145. data/vendor/pygments-main/pygments/lexers/matlab.py +720 -0
  146. data/vendor/pygments-main/pygments/lexers/mime.py +226 -0
  147. data/vendor/pygments-main/pygments/lexers/ml.py +958 -0
  148. data/vendor/pygments-main/pygments/lexers/modeling.py +366 -0
  149. data/vendor/pygments-main/pygments/lexers/modula2.py +1580 -0
  150. data/vendor/pygments-main/pygments/lexers/monte.py +204 -0
  151. data/vendor/pygments-main/pygments/lexers/mosel.py +448 -0
  152. data/vendor/pygments-main/pygments/lexers/ncl.py +894 -0
  153. data/vendor/pygments-main/pygments/lexers/nimrod.py +159 -0
  154. data/vendor/pygments-main/pygments/lexers/nit.py +64 -0
  155. data/vendor/pygments-main/pygments/lexers/nix.py +136 -0
  156. data/vendor/pygments-main/pygments/lexers/oberon.py +121 -0
  157. data/vendor/pygments-main/pygments/lexers/objective.py +504 -0
  158. data/vendor/pygments-main/pygments/lexers/ooc.py +85 -0
  159. data/vendor/pygments-main/pygments/lexers/other.py +41 -0
  160. data/vendor/pygments-main/pygments/lexers/parasail.py +79 -0
  161. data/vendor/pygments-main/pygments/lexers/parsers.py +800 -0
  162. data/vendor/pygments-main/pygments/lexers/pascal.py +644 -0
  163. data/vendor/pygments-main/pygments/lexers/pawn.py +205 -0
  164. data/vendor/pygments-main/pygments/lexers/perl.py +732 -0
  165. data/vendor/pygments-main/pygments/lexers/php.py +321 -0
  166. data/vendor/pygments-main/pygments/lexers/pointless.py +71 -0
  167. data/vendor/pygments-main/pygments/lexers/pony.py +94 -0
  168. data/vendor/pygments-main/pygments/lexers/praat.py +302 -0
  169. data/vendor/pygments-main/pygments/lexers/prolog.py +306 -0
  170. data/vendor/pygments-main/pygments/lexers/promql.py +183 -0
  171. data/vendor/pygments-main/pygments/lexers/python.py +1151 -0
  172. data/vendor/pygments-main/pygments/lexers/qvt.py +152 -0
  173. data/vendor/pygments-main/pygments/lexers/r.py +191 -0
  174. data/vendor/pygments-main/pygments/lexers/rdf.py +463 -0
  175. data/vendor/pygments-main/pygments/lexers/rebol.py +431 -0
  176. data/vendor/pygments-main/pygments/lexers/resource.py +85 -0
  177. data/vendor/pygments-main/pygments/lexers/ride.py +139 -0
  178. data/vendor/pygments-main/pygments/lexers/rnc.py +67 -0
  179. data/vendor/pygments-main/pygments/lexers/roboconf.py +82 -0
  180. data/vendor/pygments-main/pygments/lexers/robotframework.py +552 -0
  181. data/vendor/pygments-main/pygments/lexers/ruby.py +517 -0
  182. data/vendor/pygments-main/pygments/lexers/rust.py +224 -0
  183. data/vendor/pygments-main/pygments/lexers/sas.py +228 -0
  184. data/vendor/pygments-main/pygments/lexers/scdoc.py +83 -0
  185. data/vendor/pygments-main/pygments/lexers/scripting.py +1284 -0
  186. data/vendor/pygments-main/pygments/lexers/sgf.py +61 -0
  187. data/vendor/pygments-main/pygments/lexers/shell.py +914 -0
  188. data/vendor/pygments-main/pygments/lexers/sieve.py +69 -0
  189. data/vendor/pygments-main/pygments/lexers/slash.py +185 -0
  190. data/vendor/pygments-main/pygments/lexers/smalltalk.py +195 -0
  191. data/vendor/pygments-main/pygments/lexers/smv.py +79 -0
  192. data/vendor/pygments-main/pygments/lexers/snobol.py +83 -0
  193. data/vendor/pygments-main/pygments/lexers/solidity.py +92 -0
  194. data/vendor/pygments-main/pygments/lexers/special.py +105 -0
  195. data/vendor/pygments-main/pygments/lexers/sql.py +837 -0
  196. data/vendor/pygments-main/pygments/lexers/stata.py +171 -0
  197. data/vendor/pygments-main/pygments/lexers/supercollider.py +95 -0
  198. data/vendor/pygments-main/pygments/lexers/tcl.py +145 -0
  199. data/vendor/pygments-main/pygments/lexers/templates.py +2264 -0
  200. data/vendor/pygments-main/pygments/lexers/teraterm.py +335 -0
  201. data/vendor/pygments-main/pygments/lexers/testing.py +207 -0
  202. data/vendor/pygments-main/pygments/lexers/text.py +26 -0
  203. data/vendor/pygments-main/pygments/lexers/textedit.py +169 -0
  204. data/vendor/pygments-main/pygments/lexers/textfmts.py +430 -0
  205. data/vendor/pygments-main/pygments/lexers/theorem.py +474 -0
  206. data/vendor/pygments-main/pygments/lexers/tnt.py +263 -0
  207. data/vendor/pygments-main/pygments/lexers/trafficscript.py +54 -0
  208. data/vendor/pygments-main/pygments/lexers/typoscript.py +219 -0
  209. data/vendor/pygments-main/pygments/lexers/unicon.py +412 -0
  210. data/vendor/pygments-main/pygments/lexers/urbi.py +146 -0
  211. data/vendor/pygments-main/pygments/lexers/usd.py +90 -0
  212. data/vendor/pygments-main/pygments/lexers/varnish.py +190 -0
  213. data/vendor/pygments-main/pygments/lexers/verification.py +114 -0
  214. data/vendor/pygments-main/pygments/lexers/web.py +24 -0
  215. data/vendor/pygments-main/pygments/lexers/webidl.py +299 -0
  216. data/vendor/pygments-main/pygments/lexers/webmisc.py +991 -0
  217. data/vendor/pygments-main/pygments/lexers/whiley.py +116 -0
  218. data/vendor/pygments-main/pygments/lexers/x10.py +69 -0
  219. data/vendor/pygments-main/pygments/lexers/xorg.py +37 -0
  220. data/vendor/pygments-main/pygments/lexers/yang.py +104 -0
  221. data/vendor/pygments-main/pygments/lexers/zig.py +124 -0
  222. data/vendor/pygments-main/pygments/modeline.py +1 -1
  223. data/vendor/pygments-main/pygments/plugin.py +4 -2
  224. data/vendor/pygments-main/pygments/regexopt.py +1 -1
  225. data/vendor/pygments-main/pygments/scanner.py +2 -2
  226. data/vendor/pygments-main/pygments/sphinxext.py +2 -4
  227. data/vendor/pygments-main/pygments/style.py +61 -24
  228. data/vendor/pygments-main/pygments/styles/__init__.py +10 -4
  229. data/vendor/pygments-main/pygments/styles/abap.py +1 -1
  230. data/vendor/pygments-main/pygments/styles/algol.py +1 -1
  231. data/vendor/pygments-main/pygments/styles/algol_nu.py +1 -1
  232. data/vendor/pygments-main/pygments/styles/arduino.py +2 -2
  233. data/vendor/pygments-main/pygments/styles/autumn.py +1 -1
  234. data/vendor/pygments-main/pygments/styles/borland.py +1 -1
  235. data/vendor/pygments-main/pygments/styles/bw.py +1 -1
  236. data/vendor/pygments-main/pygments/styles/colorful.py +1 -1
  237. data/vendor/pygments-main/pygments/styles/default.py +1 -1
  238. data/vendor/pygments-main/pygments/styles/emacs.py +1 -1
  239. data/vendor/pygments-main/pygments/styles/friendly.py +1 -1
  240. data/vendor/pygments-main/pygments/styles/fruity.py +1 -1
  241. data/vendor/pygments-main/pygments/styles/igor.py +1 -1
  242. data/vendor/pygments-main/pygments/styles/inkpot.py +67 -0
  243. data/vendor/pygments-main/pygments/styles/lovelace.py +1 -1
  244. data/vendor/pygments-main/pygments/styles/manni.py +1 -1
  245. data/vendor/pygments-main/pygments/styles/monokai.py +4 -3
  246. data/vendor/pygments-main/pygments/styles/murphy.py +1 -1
  247. data/vendor/pygments-main/pygments/styles/native.py +1 -1
  248. data/vendor/pygments-main/pygments/styles/paraiso_dark.py +1 -1
  249. data/vendor/pygments-main/pygments/styles/paraiso_light.py +1 -1
  250. data/vendor/pygments-main/pygments/styles/pastie.py +1 -1
  251. data/vendor/pygments-main/pygments/styles/perldoc.py +1 -1
  252. data/vendor/pygments-main/pygments/styles/rainbow_dash.py +1 -1
  253. data/vendor/pygments-main/pygments/styles/rrt.py +1 -1
  254. data/vendor/pygments-main/pygments/styles/sas.py +1 -1
  255. data/vendor/pygments-main/pygments/styles/solarized.py +134 -0
  256. data/vendor/pygments-main/pygments/styles/stata_dark.py +41 -0
  257. data/vendor/pygments-main/pygments/styles/{stata.py → stata_light.py} +14 -15
  258. data/vendor/pygments-main/pygments/styles/tango.py +1 -1
  259. data/vendor/pygments-main/pygments/styles/trac.py +1 -1
  260. data/vendor/pygments-main/pygments/styles/vim.py +1 -1
  261. data/vendor/pygments-main/pygments/styles/vs.py +1 -1
  262. data/vendor/pygments-main/pygments/styles/xcode.py +1 -1
  263. data/vendor/pygments-main/pygments/token.py +1 -1
  264. data/vendor/pygments-main/pygments/unistring.py +47 -108
  265. data/vendor/pygments-main/pygments/util.py +15 -92
  266. metadata +69 -136
  267. data/CHANGELOG.md +0 -111
  268. data/README.md +0 -121
  269. data/circle.yml +0 -20
  270. data/test/test_data.py +0 -514
  271. data/test/test_data_generated +0 -2582
  272. data/vendor/custom_lexers/github.py +0 -565
  273. data/vendor/pygments-main/CHANGES +0 -1186
  274. data/vendor/pygments-main/MANIFEST.in +0 -6
  275. data/vendor/pygments-main/Makefile +0 -65
  276. data/vendor/pygments-main/README.rst +0 -39
  277. data/vendor/pygments-main/REVISION +0 -1
  278. data/vendor/pygments-main/TODO +0 -12
  279. data/vendor/pygments-main/doc/Makefile +0 -153
  280. data/vendor/pygments-main/doc/_static/favicon.ico +0 -0
  281. data/vendor/pygments-main/doc/_static/logo_new.png +0 -0
  282. data/vendor/pygments-main/doc/_static/logo_only.png +0 -0
  283. data/vendor/pygments-main/doc/_templates/docssidebar.html +0 -3
  284. data/vendor/pygments-main/doc/_templates/indexsidebar.html +0 -25
  285. data/vendor/pygments-main/doc/_themes/pygments14/layout.html +0 -98
  286. data/vendor/pygments-main/doc/_themes/pygments14/static/bodybg.png +0 -0
  287. data/vendor/pygments-main/doc/_themes/pygments14/static/docbg.png +0 -0
  288. data/vendor/pygments-main/doc/_themes/pygments14/static/listitem.png +0 -0
  289. data/vendor/pygments-main/doc/_themes/pygments14/static/logo.png +0 -0
  290. data/vendor/pygments-main/doc/_themes/pygments14/static/pocoo.png +0 -0
  291. data/vendor/pygments-main/doc/_themes/pygments14/static/pygments14.css_t +0 -401
  292. data/vendor/pygments-main/doc/_themes/pygments14/theme.conf +0 -15
  293. data/vendor/pygments-main/doc/conf.py +0 -241
  294. data/vendor/pygments-main/doc/docs/api.rst +0 -354
  295. data/vendor/pygments-main/doc/docs/authors.rst +0 -4
  296. data/vendor/pygments-main/doc/docs/changelog.rst +0 -1
  297. data/vendor/pygments-main/doc/docs/cmdline.rst +0 -166
  298. data/vendor/pygments-main/doc/docs/filterdevelopment.rst +0 -71
  299. data/vendor/pygments-main/doc/docs/filters.rst +0 -41
  300. data/vendor/pygments-main/doc/docs/formatterdevelopment.rst +0 -169
  301. data/vendor/pygments-main/doc/docs/formatters.rst +0 -48
  302. data/vendor/pygments-main/doc/docs/index.rst +0 -66
  303. data/vendor/pygments-main/doc/docs/integrate.rst +0 -40
  304. data/vendor/pygments-main/doc/docs/java.rst +0 -70
  305. data/vendor/pygments-main/doc/docs/lexerdevelopment.rst +0 -728
  306. data/vendor/pygments-main/doc/docs/lexers.rst +0 -69
  307. data/vendor/pygments-main/doc/docs/moinmoin.rst +0 -39
  308. data/vendor/pygments-main/doc/docs/plugins.rst +0 -93
  309. data/vendor/pygments-main/doc/docs/quickstart.rst +0 -205
  310. data/vendor/pygments-main/doc/docs/rstdirective.rst +0 -22
  311. data/vendor/pygments-main/doc/docs/styles.rst +0 -201
  312. data/vendor/pygments-main/doc/docs/tokens.rst +0 -372
  313. data/vendor/pygments-main/doc/docs/unicode.rst +0 -58
  314. data/vendor/pygments-main/doc/download.rst +0 -41
  315. data/vendor/pygments-main/doc/faq.rst +0 -139
  316. data/vendor/pygments-main/doc/index.rst +0 -54
  317. data/vendor/pygments-main/doc/languages.rst +0 -154
  318. data/vendor/pygments-main/doc/make.bat +0 -190
  319. data/vendor/pygments-main/doc/pygmentize.1 +0 -94
  320. data/vendor/pygments-main/external/autopygmentize +0 -101
  321. data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +0 -162
  322. data/vendor/pygments-main/external/markdown-processor.py +0 -67
  323. data/vendor/pygments-main/external/moin-parser.py +0 -112
  324. data/vendor/pygments-main/external/pygments.bashcomp +0 -38
  325. data/vendor/pygments-main/external/rst-directive.py +0 -82
  326. data/vendor/pygments-main/pygmentize +0 -8
  327. data/vendor/pygments-main/requirements.txt +0 -5
  328. data/vendor/pygments-main/scripts/check_sources.py +0 -211
  329. data/vendor/pygments-main/scripts/debug_lexer.py +0 -246
  330. data/vendor/pygments-main/scripts/detect_missing_analyse_text.py +0 -33
  331. data/vendor/pygments-main/scripts/epydoc.css +0 -280
  332. data/vendor/pygments-main/scripts/get_vimkw.py +0 -74
  333. data/vendor/pygments-main/scripts/pylintrc +0 -301
  334. data/vendor/pygments-main/scripts/vim2pygments.py +0 -935
  335. data/vendor/pygments-main/setup.cfg +0 -10
  336. data/vendor/pygments-main/setup.py +0 -77
  337. data/vendor/pygments-main/tox.ini +0 -7
  338. data/vendor/simplejson/.gitignore +0 -10
  339. data/vendor/simplejson/.travis.yml +0 -5
  340. data/vendor/simplejson/CHANGES.txt +0 -291
  341. data/vendor/simplejson/LICENSE.txt +0 -19
  342. data/vendor/simplejson/MANIFEST.in +0 -5
  343. data/vendor/simplejson/README.rst +0 -19
  344. data/vendor/simplejson/conf.py +0 -179
  345. data/vendor/simplejson/index.rst +0 -628
  346. data/vendor/simplejson/scripts/make_docs.py +0 -18
  347. data/vendor/simplejson/setup.py +0 -104
  348. data/vendor/simplejson/simplejson/__init__.py +0 -510
  349. data/vendor/simplejson/simplejson/_speedups.c +0 -2745
  350. data/vendor/simplejson/simplejson/decoder.py +0 -425
  351. data/vendor/simplejson/simplejson/encoder.py +0 -567
  352. data/vendor/simplejson/simplejson/ordered_dict.py +0 -119
  353. data/vendor/simplejson/simplejson/scanner.py +0 -77
  354. data/vendor/simplejson/simplejson/tests/__init__.py +0 -67
  355. data/vendor/simplejson/simplejson/tests/test_bigint_as_string.py +0 -55
  356. data/vendor/simplejson/simplejson/tests/test_check_circular.py +0 -30
  357. data/vendor/simplejson/simplejson/tests/test_decimal.py +0 -66
  358. data/vendor/simplejson/simplejson/tests/test_decode.py +0 -83
  359. data/vendor/simplejson/simplejson/tests/test_default.py +0 -9
  360. data/vendor/simplejson/simplejson/tests/test_dump.py +0 -67
  361. data/vendor/simplejson/simplejson/tests/test_encode_basestring_ascii.py +0 -46
  362. data/vendor/simplejson/simplejson/tests/test_encode_for_html.py +0 -32
  363. data/vendor/simplejson/simplejson/tests/test_errors.py +0 -34
  364. data/vendor/simplejson/simplejson/tests/test_fail.py +0 -91
  365. data/vendor/simplejson/simplejson/tests/test_float.py +0 -19
  366. data/vendor/simplejson/simplejson/tests/test_indent.py +0 -86
  367. data/vendor/simplejson/simplejson/tests/test_item_sort_key.py +0 -20
  368. data/vendor/simplejson/simplejson/tests/test_namedtuple.py +0 -121
  369. data/vendor/simplejson/simplejson/tests/test_pass1.py +0 -76
  370. data/vendor/simplejson/simplejson/tests/test_pass2.py +0 -14
  371. data/vendor/simplejson/simplejson/tests/test_pass3.py +0 -20
  372. data/vendor/simplejson/simplejson/tests/test_recursion.py +0 -67
  373. data/vendor/simplejson/simplejson/tests/test_scanstring.py +0 -117
  374. data/vendor/simplejson/simplejson/tests/test_separators.py +0 -42
  375. data/vendor/simplejson/simplejson/tests/test_speedups.py +0 -20
  376. data/vendor/simplejson/simplejson/tests/test_tuple.py +0 -49
  377. data/vendor/simplejson/simplejson/tests/test_unicode.py +0 -109
  378. data/vendor/simplejson/simplejson/tool.py +0 -39
@@ -0,0 +1,79 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.smv
4
+ ~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for the SMV languages.
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ from pygments.lexer import RegexLexer, words
13
+ from pygments.token import Comment, Keyword, Name, Number, Operator, \
14
+ Punctuation, Text
15
+
16
+ __all__ = ['NuSMVLexer']
17
+
18
+
19
class NuSMVLexer(RegexLexer):
    """
    Lexer for the NuSMV language.

    NuSMV is the input language of the NuSMV symbolic model checker; this
    lexer covers module/variable declarations, CTL/LTL specification
    keywords, and sized word constants.

    .. versionadded:: 2.2
    """

    name = 'NuSMV'
    aliases = ['nusmv']
    filenames = ['*.smv']
    mimetypes = []  # no registered MIME type

    tokens = {
        'root': [
            # Comments: block form /-- ... --/ ((?s) lets it span lines)
            # and line form introduced by "--".
            (r'(?s)\/\-\-.*?\-\-/', Comment),
            (r'--.*\n', Comment),

            # Reserved words.  The (?![\w$#-]) lookahead stops a keyword
            # from matching when it is only a prefix of a longer
            # identifier (identifiers may contain $, # and -).
            (words(('MODULE', 'DEFINE', 'MDEFINE', 'CONSTANTS', 'VAR', 'IVAR',
                    'FROZENVAR', 'INIT', 'TRANS', 'INVAR', 'SPEC', 'CTLSPEC',
                    'LTLSPEC', 'PSLSPEC', 'COMPUTE', 'NAME', 'INVARSPEC',
                    'FAIRNESS', 'JUSTICE', 'COMPASSION', 'ISA', 'ASSIGN',
                    'CONSTRAINT', 'SIMPWFF', 'CTLWFF', 'LTLWFF', 'PSLWFF',
                    'COMPWFF', 'IN', 'MIN', 'MAX', 'MIRROR', 'PRED',
                    'PREDICATES'), suffix=r'(?![\w$#-])'),
             Keyword.Declaration),
            (r'process(?![\w$#-])', Keyword),
            (words(('array', 'of', 'boolean', 'integer', 'real', 'word'),
                   suffix=r'(?![\w$#-])'), Keyword.Type),
            (words(('case', 'esac'), suffix=r'(?![\w$#-])'), Keyword),
            (words(('word1', 'bool', 'signed', 'unsigned', 'extend', 'resize',
                    'sizeof', 'uwconst', 'swconst', 'init', 'self', 'count',
                    'abs', 'max', 'min'), suffix=r'(?![\w$#-])'),
             Name.Builtin),
            # Temporal (CTL/LTL) and word-level operators written as words.
            (words(('EX', 'AX', 'EF', 'AF', 'EG', 'AG', 'E', 'F', 'O', 'G',
                    'H', 'X', 'Y', 'Z', 'A', 'U', 'S', 'V', 'T', 'BU', 'EBF',
                    'ABF', 'EBG', 'ABG', 'next', 'mod', 'union', 'in', 'xor',
                    'xnor'), suffix=r'(?![\w$#-])'),
             Operator.Word),
            (words(('TRUE', 'FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),

            # Names — placed after the keyword rules so reserved words are
            # not swallowed as plain identifiers.
            (r'[a-zA-Z_][\w$#-]*', Name.Variable),

            # Operators
            (r':=', Operator),
            (r'[-&|+*/<>!=]', Operator),

            # Literals: plain (optionally negative) integers, then sized
            # word constants such as 0ub8_0101_0101 — the \b after \d+
            # keeps the integer rule from eating a word constant's "0".
            (r'\-?\d+\b', Number.Integer),
            (r'0[su][bB]\d*_[01_]+', Number.Bin),
            (r'0[su][oO]\d*_[0-7_]+', Number.Oct),
            (r'0[su][dD]\d*_[\d_]+', Number.Dec),
            (r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),

            # Whitespace, punctuation and the rest
            (r'\s+', Text.Whitespace),
            (r'[()\[\]{};?:.,]', Punctuation),
        ],
    }
@@ -0,0 +1,83 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.snobol
4
+ ~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for the SNOBOL language.
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ from pygments.lexer import RegexLexer, bygroups
13
+ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
14
+ Number, Punctuation
15
+
16
+ __all__ = ['SnobolLexer']
17
+
18
+
19
class SnobolLexer(RegexLexer):
    """
    Lexer for the SNOBOL4 programming language.

    Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
    Does not require spaces around binary operators.

    SNOBOL4 is column-sensitive: comments, continuation lines, directives
    and labels are all distinguished by what appears in column 1, which is
    why the 'root' state only looks at line starts and then hands off to
    'statement'.

    .. versionadded:: 1.5
    """

    name = "Snobol"
    aliases = ["snobol"]
    filenames = ['*.snobol']
    mimetypes = ['text/x-snobol']

    tokens = {
        # root state, start of line
        # comments, continuation lines, and directives start in column 1
        # as do labels
        'root': [
            (r'\*.*\n', Comment),
            # '+' or '.' in column 1 continues the previous statement
            (r'[+.] ', Punctuation, 'statement'),
            # '-' in column 1: directive/control line, rendered as a comment
            (r'-.*\n', Comment),
            # END terminates the program proper; the rest is data (heredoc)
            (r'END\s*\n', Name.Label, 'heredoc'),
            (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
            (r'\s+', Text, 'statement'),
        ],
        # statement state, line after continuation or label
        'statement': [
            (r'\s*\n', Text, '#pop'),
            (r'\s+', Text),
            # built-in functions/patterns; the lookbehind + lookahead keep
            # these names from firing inside a longer identifier
            (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
             r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
             r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
             r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
             Name.Builtin),
            (r'[A-Za-z][\w.]*', Name),
            # ASCII equivalents of original operators
            # | for the EBCDIC equivalent, ! likewise
            # \ for EBCDIC negation
            (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
            (r'"[^"]*"', String),
            (r"'[^']*'", String),
            # Accept SPITBOL syntax for real numbers
            # as well as Macro SNOBOL4
            (r'[0-9]+(?=[^.EeDd])', Number.Integer),
            (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
            # Goto
            (r':', Punctuation, 'goto'),
            (r'[()<>,;]', Punctuation),
        ],
        # Goto block
        'goto': [
            # end of line pops both 'goto' and the enclosing 'statement'
            (r'\s*\n', Text, "#pop:2"),
            (r'\s+', Text),
            # success (S) / failure (F) transfer prefix
            (r'F|S', Keyword),
            (r'(\()([A-Za-z][\w.]*)(\))',
             bygroups(Punctuation, Name.Label, Punctuation))
        ],
        # everything after the END statement is basically one
        # big heredoc.
        'heredoc': [
            (r'.*\n', String.Heredoc)
        ]
    }
@@ -0,0 +1,92 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.solidity
4
+ ~~~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for Solidity.
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+
14
+ from pygments.lexer import RegexLexer, bygroups, include, words
15
+ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
16
+ Number, Punctuation, Whitespace
17
+
18
+ __all__ = ['SolidityLexer']
19
+
20
+
21
class SolidityLexer(RegexLexer):
    """
    For Solidity source code.

    .. versionadded:: 2.5
    """

    name = 'Solidity'
    aliases = ['solidity']
    filenames = ['*.sol']
    mimetypes = []  # no registered MIME type

    flags = re.MULTILINE | re.UNICODE

    # Elementary type names: address/bool plus the sized families
    # bytes/hash/int/string/uint with an optional width suffix in
    # multiples of 8 up to 256 (e.g. uint256, bytes32).
    datatype = (
        r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
        r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
        r'|216|224|232|240|248|256)?))\b'
    )

    tokens = {
        'root': [
            include('whitespace'),
            include('comments'),
            # "pragma solidity ..." switches to the dedicated pragma state
            # until the terminating semicolon.
            (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
            (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
             bygroups(Keyword, Whitespace, Name.Entity)),
            # Typed declaration with an optional visibility modifier.
            (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
             r'([a-zA-Z_]\w*)',
             bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
            (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
             bygroups(Keyword.Type, Whitespace, Name.Variable)),
            # Member access on the built-in globals msg/block/tx.
            (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
            (words((
                'block', 'break', 'constant', 'constructor', 'continue',
                'contract', 'do', 'else', 'external', 'false', 'for',
                'function', 'if', 'import', 'inherited', 'internal', 'is',
                'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
                'payable', 'private', 'public', 'require', 'return',
                'returns', 'struct', 'suicide', 'throw', 'this', 'true',
                'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
             Keyword.Type),
            (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
            (datatype, Keyword.Type),
            include('constants'),
            (r'[a-zA-Z_]\w*', Text),
            (r'[!<=>+*/-]', Operator),
            (r'[.;:{}(),\[\]]', Punctuation)
        ],
        'comments': [
            (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
            (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
            # Unterminated block comment: highlight through end of input.
            (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
        ],
        'constants': [
            (r'("(\\"|.)*?")', String.Double),
            (r"('(\\'|.)*?')", String.Single),
            (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
            (r'\b\d+\b', Number.Decimal),
        ],
        'pragma': [
            include('whitespace'),
            include('comments'),
            # Version constraints such as ^0.4.24, >=0.5.0, <0.7.0.
            (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
             bygroups(Operator, Whitespace, Keyword)),
            # Semicolon ends the pragma; return to 'root'.
            (r';', Punctuation, '#pop')
        ],
        'whitespace': [
            (r'\s+', Whitespace),
            (r'\n', Whitespace)
        ]
    }
@@ -0,0 +1,105 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.special
4
+ ~~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Special lexers.
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+ from io import BytesIO
14
+
15
+ from pygments.lexer import Lexer
16
+ from pygments.token import Token, Error, Text
17
+ from pygments.util import get_choice_opt
18
+
19
+
20
+ __all__ = ['TextLexer', 'RawTokenLexer']
21
+
22
+
23
class TextLexer(Lexer):
    """
    "Null" lexer, doesn't highlight anything.
    """
    name = 'Text only'
    aliases = ['text']
    filenames = ['*.txt']
    mimetypes = ['text/plain']
    # Very low priority so any other lexer that matches wins when guessing.
    priority = 0.01

    def get_tokens_unprocessed(self, text):
        # Emit the whole input as one plain-text token at offset 0.
        yield 0, Text, text

    def analyse_text(text):
        # Pygments convention: analyse_text takes only the text (no
        # self/cls); always claim the (deliberately low) class priority.
        return TextLexer.priority
38
+
39
+
40
# Cache mapping raw token-type byte strings (e.g. b"Token.Name") to the
# resolved token-type objects; shared by all RawTokenLexer instances.
_ttype_cache = {}

# The raw token stream is processed as bytes, one "Type\trepr\n" line
# per token.
line_re = re.compile(b'.*?\n')
43
+
44
+
45
class RawTokenLexer(Lexer):
    """
    Recreate a token stream formatted with the `RawTokenFormatter`. This
    lexer raises exceptions during parsing if the token stream in the
    file is malformed.

    Additional options accepted:

    `compress`
        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
        the given compression algorithm before lexing (default: ``""``).
    """
    name = 'Raw token data'
    aliases = ['raw']
    filenames = []
    mimetypes = ['application/x-pygments-tokens']

    def __init__(self, **options):
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        Lexer.__init__(self, **options)

    def get_tokens(self, text):
        """Yield (tokentype, value) pairs from a raw token byte stream."""
        if isinstance(text, str):
            # raw token stream never has any non-ASCII characters
            text = text.encode('ascii')
        if self.compress == 'gz':
            import gzip
            gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text))
            text = gzipfile.read()
        elif self.compress == 'bz2':
            import bz2
            text = bz2.decompress(text)

        # do not call Lexer.get_tokens() because we do not want Unicode
        # decoding to occur, and stripping is not optional.
        text = text.strip(b'\n') + b'\n'
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    def get_tokens_unprocessed(self, text):
        """Parse ``Type\\trepr\\n`` lines; *text* must be bytes."""
        length = 0
        for match in line_re.finditer(text):
            try:
                ttypestr, val = match.group().split(b'\t', 1)
            except ValueError:
                # No tab separator: emit the raw line as an Error token.
                val = match.group().decode('ascii', 'replace')
                ttype = Error
            else:
                ttype = _ttype_cache.get(ttypestr)
                if not ttype:
                    ttype = Token
                    # BUG FIX: ttypestr is bytes; calling .split('.') with
                    # a str separator raised TypeError, and indexing bytes
                    # yields ints which have no .isupper().  Decode first.
                    ttypes = ttypestr.decode('ascii').split('.')[1:]
                    for ttype_ in ttypes:
                        if not ttype_ or not ttype_[0].isupper():
                            raise ValueError('malformed token name')
                        ttype = getattr(ttype, ttype_)
                    _ttype_cache[ttypestr] = ttype
                # BUG FIX: val is a Python 3 repr ("'...'") plus the line's
                # trailing newline, so strip one leading quote and the
                # trailing quote+newline.  The old [2:-2] slice dated from
                # the Python 2 u'...' repr and dropped the value's first
                # character.
                val = val[1:-2].decode('unicode-escape')
            yield length, ttype, val
            length += len(val)
@@ -0,0 +1,837 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.sql
4
+ ~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for various SQL dialects and related interactive sessions.
7
+
8
+ Postgres specific lexers:
9
+
10
+ `PostgresLexer`
11
+ A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
12
+ lexer are:
13
+
14
+ - keywords and data types list parsed from the PG docs (run the
15
+ `_postgres_builtins` module to update them);
16
+ - Content of $-strings parsed using a specific lexer, e.g. the content
17
+ of a PL/Python function is parsed using the Python lexer;
18
+ - parse PG specific constructs: E-strings, $-strings, U&-strings,
19
+ different operators and punctuation.
20
+
21
+ `PlPgsqlLexer`
22
+ A lexer for the PL/pgSQL language. Adds a few specific construct on
23
+ top of the PG SQL lexer (such as <<label>>).
24
+
25
+ `PostgresConsoleLexer`
26
+ A lexer to highlight an interactive psql session:
27
+
28
+ - identifies the prompt and does its best to detect the end of command
29
+ in multiline statement where not all the lines are prefixed by a
30
+ prompt, telling them apart from the output;
31
+ - highlights errors in the output and notification levels;
32
+ - handles psql backslash commands.
33
+
34
+ The ``tests/examplefiles`` contains a few test files with data to be
35
+ parsed by these lexers.
36
+
37
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
38
+ :license: BSD, see LICENSE for details.
39
+ """
40
+
41
+ import re
42
+
43
+ from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
44
+ from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
45
+ Keyword, Name, String, Number, Generic, Literal
46
+ from pygments.lexers import get_lexer_by_name, ClassNotFound
47
+
48
+ from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
49
+ PSEUDO_TYPES, PLPGSQL_KEYWORDS
50
+ from pygments.lexers._mysql_builtins import \
51
+ MYSQL_CONSTANTS, \
52
+ MYSQL_DATATYPES, \
53
+ MYSQL_FUNCTIONS, \
54
+ MYSQL_KEYWORDS, \
55
+ MYSQL_OPTIMIZER_HINTS
56
+
57
+ from pygments.lexers import _tsql_builtins
58
+
59
+
60
__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
           'SqlLexer', 'TransactSqlLexer', 'MySqlLexer',
           'SqliteConsoleLexer', 'RqlLexer']

# Split input into lines, keeping the trailing newline in each match.
line_re = re.compile('.*?\n')

# LANGUAGE clause near a $-string; used to pick the sub-lexer for its body.
language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)

# A DO statement implies an implicit plpgsql body.
do_re = re.compile(r'\bDO\b', re.IGNORECASE)

# Regular expressions for analyse_text()
name_between_bracket_re = re.compile(r'\[[a-zA-Z_]\w*\]')
name_between_backtick_re = re.compile(r'`[a-zA-Z_]\w*`')
tsql_go_re = re.compile(r'\bgo\b', re.IGNORECASE)
tsql_declare_re = re.compile(r'\bdeclare\s+@', re.IGNORECASE)
tsql_variable_re = re.compile(r'@[a-zA-Z_]\w*\b')
76
+
77
+
78
def language_callback(lexer, match):
    """Parse the content of a $-string using a lexer

    The lexer is chosen looking for a nearby LANGUAGE or assumed as
    plpgsql if inside a DO statement and no LANGUAGE has been found.
    """
    lx = None
    # First look for a LANGUAGE clause within 100 chars *after* the string...
    m = language_re.match(lexer.text[match.end():match.end()+100])
    if m is not None:
        lx = lexer._get_lexer(m.group(1))
    else:
        # ...then for the closest one within 100 chars *before* it...
        m = list(language_re.finditer(
            lexer.text[max(0, match.start()-100):match.start()]))
        if m:
            lx = lexer._get_lexer(m[-1].group(1))
        else:
            # ...else fall back to plpgsql when a DO keyword precedes it.
            m = list(do_re.finditer(
                lexer.text[max(0, match.start()-25):match.start()]))
            if m:
                lx = lexer._get_lexer('plpgsql')

    # 1 = $, 2 = delimiter, 3 = $
    yield (match.start(1), String, match.group(1))
    yield (match.start(2), String.Delimiter, match.group(2))
    yield (match.start(3), String, match.group(3))
    # 4 = string contents
    if lx:
        # Delegate the body to the nested lexer (offsets are relative to
        # the body, matching RegexLexer's callback contract).
        for x in lx.get_tokens_unprocessed(match.group(4)):
            yield x
    else:
        yield (match.start(4), String, match.group(4))
    # 5 = $, 6 = delimiter, 7 = $
    yield (match.start(5), String, match.group(5))
    yield (match.start(6), String.Delimiter, match.group(6))
    yield (match.start(7), String, match.group(7))
113
+
114
+
115
class PostgresBase:
    """Base class for Postgres-related lexers.

    This is implemented as a mixin to avoid the Lexer metaclass kicking in.
    this way the different lexer don't have a common Lexer ancestor. If they
    had, _tokens could be created on this ancestor and not updated for the
    other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
    seem to suggest that regexp lexers are not really subclassable.
    """
    def get_tokens_unprocessed(self, text, *args):
        # Have a copy of the entire text to be used by `language_callback`.
        self.text = text
        yield from super().get_tokens_unprocessed(text, *args)

    def _get_lexer(self, lang):
        # Map a LANGUAGE name to a pygments lexer, also trying aliases
        # derived from the PL naming convention (e.g. plpythonu -> python).
        if lang.lower() == 'sql':
            return get_lexer_by_name('postgresql', **self.options)

        tries = [lang]
        if lang.startswith('pl'):
            tries.append(lang[2:])
        if lang.endswith('u'):
            tries.append(lang[:-1])
        if lang.startswith('pl') and lang.endswith('u'):
            tries.append(lang[2:-1])

        for lx in tries:
            try:
                return get_lexer_by_name(lx, **self.options)
            except ClassNotFound:
                pass
        else:
            # TODO: better logging
            # print >>sys.stderr, "language not found:", lang
            return None
150
+
151
+
152
class PostgresLexer(PostgresBase, RegexLexer):
    """
    Lexer for the PostgreSQL dialect of SQL.

    .. versionadded:: 1.5
    """

    name = 'PostgreSQL SQL dialect'
    aliases = ['postgresql', 'postgres']
    mimetypes = ['text/x-postgresql']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'--.*\n?', Comment.Single),
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            # Built-in data/pseudo types; multi-word names tolerate any
            # whitespace between the words.
            (r'(' + '|'.join(s.replace(" ", r"\s+")
                             for s in DATATYPES + PSEUDO_TYPES) + r')\b',
             Name.Builtin),
            (words(KEYWORDS, suffix=r'\b'), Keyword),
            (r'[+*/<>=~!@#%^&|`?-]+', Operator),
            (r'::', Operator),  # cast
            (r'\$\d+', Name.Variable),
            (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
            (r'[0-9]+', Number.Integer),
            # E'...' escape strings and U&'...' Unicode strings.
            (r"((?:E|U&)?)(')", bygroups(String.Affix, String.Single), 'string'),
            # quoted identifier
            (r'((?:U&)?)(")', bygroups(String.Affix, String.Name), 'quoted-ident'),
            # Dollar-quoted string; the body is handed to language_callback
            # which may delegate to a nested lexer.
            (r'(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)', language_callback),
            (r'[a-z_]\w*', Name),

            # psql variable in SQL
            (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),

            (r'[;:()\[\]{},.]', Punctuation),
        ],
        'multiline-comments': [
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[^/*]+', Comment.Multiline),
            (r'[/*]', Comment.Multiline)
        ],
        'string': [
            (r"[^']+", String.Single),
            (r"''", String.Single),
            (r"'", String.Single, '#pop'),
        ],
        'quoted-ident': [
            (r'[^"]+', String.Name),
            (r'""', String.Name),
            (r'"', String.Name, '#pop'),
        ],
    }
206
+
207
+
208
class PlPgsqlLexer(PostgresBase, RegexLexer):
    """
    Handle the extra syntax in Pl/pgSQL language.

    .. versionadded:: 1.5
    """
    name = 'PL/pgSQL'
    aliases = ['plpgsql']
    mimetypes = ['text/x-plpgsql']

    flags = re.IGNORECASE
    # Copy each state list so the tweaks below don't mutate
    # PostgresLexer.tokens in place.
    tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}

    # extend the keywords list
    for i, pattern in enumerate(tokens['root']):
        if pattern[1] == Keyword:
            tokens['root'][i] = (
                words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
                Keyword)
            del i
            break
    else:
        assert 0, "SQL keywords not found"

    # Add specific PL/pgSQL rules (before the SQL ones)
    tokens['root'][:0] = [
        (r'\%[a-z]\w*\b', Name.Builtin),  # actually, a datatype
        (r':=', Operator),
        (r'\<\<[a-z]\w*\>\>', Name.Label),
        (r'\#[a-z]\w*\b', Keyword.Pseudo),  # #variable_conflict
    ]
239
+
240
+
241
class PsqlRegexLexer(PostgresBase, RegexLexer):
    """
    Extend the PostgresLexer adding support specific for psql commands.

    This is not a complete psql lexer yet as it lacks prompt support
    and output rendering.
    """

    name = 'PostgreSQL console - regexp based lexer'
    aliases = []  # not public

    flags = re.IGNORECASE
    # Copy each state list so the additions below don't touch
    # PostgresLexer.tokens.
    tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}

    tokens['root'].append(
        (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
    tokens['psql-command'] = [
        # NOTE(review): this pushes 'root' rather than '#pop'-ping; looks
        # harmless for the console lexer's line-at-a-time use — confirm.
        (r'\n', Text, 'root'),
        (r'\s+', Text),
        (r'\\[^\s]+', Keyword.Pseudo),
        (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
        (r"'(''|[^'])*'", String.Single),
        (r"`([^`])*`", String.Backtick),
        (r"[^\s]+", String.Symbol),
    ]
266
+
267
+
268
# A psql prompt: optional non-space prefix, then one of =-($'" followed
# by # (superuser) or > (regular user).
re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
# End of a SQL command: a semicolon, optionally followed by a -- comment.
re_end_command = re.compile(r';\s*(--.*?)?$')
# A psql backslash command on a line of its own.
# (An earlier, weaker definition r'\s*\\' was immediately shadowed by
# this one and has been removed as dead code.)
re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
# Server message levels; ERROR/FATAL switch the output token to an error.
re_error = re.compile(r'(ERROR|FATAL):')
re_message = re.compile(
    r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
    r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
276
+
277
+
278
class lookahead:
    """Wrap an iterator and allow pushing back an item."""

    def __init__(self, x):
        self.iter = iter(x)
        self._nextitem = None

    def __iter__(self):
        # The wrapper is itself an iterator.
        return self

    def send(self, i):
        # Stash *i* so the next call to __next__() returns it, then echo
        # the pushed-back item to the caller.
        self._nextitem = i
        return i

    def __next__(self):
        # Serve (and clear) a pushed-back item before touching the
        # underlying iterator.
        pending, self._nextitem = self._nextitem, None
        if pending is not None:
            return pending
        return next(self.iter)

    next = __next__
298
+
299
+
300
class PostgresConsoleLexer(Lexer):
    """
    Lexer for psql sessions.

    .. versionadded:: 1.5
    """

    name = 'PostgreSQL console (psql)'
    aliases = ['psql', 'postgresql-console', 'postgres-console']
    mimetypes = ['text/x-postgresql-psql']

    def get_tokens_unprocessed(self, data):
        # The SQL typed at the prompts is tokenized by a nested lexer.
        sql = PsqlRegexLexer(**self.options)

        lines = lookahead(line_re.findall(data))

        # prompt-output cycle
        while 1:

            # consume the lines of the command: start with an optional prompt
            # and continue until the end of command is detected
            curcode = ''
            insertions = []
            for line in lines:
                # Identify a shell prompt in case of psql commandline example
                if line.startswith('$') and not curcode:
                    lexer = get_lexer_by_name('console', **self.options)
                    yield from lexer.get_tokens_unprocessed(line)
                    break

                # Identify a psql prompt
                mprompt = re_prompt.match(line)
                if mprompt is not None:
                    insertions.append((len(curcode),
                                       [(0, Generic.Prompt, mprompt.group())]))
                    curcode += line[len(mprompt.group()):]
                else:
                    curcode += line

                # Check if this is the end of the command
                # TODO: better handle multiline comments at the end with
                # a lexer with an external state?
                if re_psql_command.match(curcode) \
                   or re_end_command.search(curcode):
                    break

            # Emit the combined stream of command and prompt(s)
            yield from do_insertions(insertions,
                                     sql.get_tokens_unprocessed(curcode))

            # Emit the output lines
            out_token = Generic.Output
            for line in lines:
                mprompt = re_prompt.match(line)
                if mprompt is not None:
                    # push the line back to have it processed by the prompt
                    lines.send(line)
                    break

                mmsg = re_message.match(line)
                if mmsg is not None:
                    # Server messages; ERROR/FATAL switch all remaining
                    # output of this block to the error token.
                    if mmsg.group(1).startswith("ERROR") \
                       or mmsg.group(1).startswith("FATAL"):
                        out_token = Generic.Error
                    yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
                    yield (mmsg.start(2), out_token, mmsg.group(2))
                else:
                    yield (0, out_token, line)
            else:
                # Input exhausted with no new prompt: we are done.
                return
370
+
371
+
372
class SqlLexer(RegexLexer):
    """
    Lexer for Structured Query Language. Currently, this lexer does
    not recognize any special syntax except ANSI SQL.
    """

    name = 'SQL'
    aliases = ['sql']
    filenames = ['*.sql']
    mimetypes = ['text/x-sql']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'--.*\n?', Comment.Single),
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (words((
                'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
                'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
                'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
                'ASYMMETRIC', 'AT', 'ATOMIC', 'AUTHORIZATION', 'AVG', 'BACKWARD',
                'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
                'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
                'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
                'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
                'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
                'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
                'CLUSTER', 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION',
                'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
                'COLUMN_NAME', 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT',
                'COMMIT', 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT',
                'CONNECTION', 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS',
                'CONSTRAINT_CATALOG', 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA',
                'CONSTRUCTOR', 'CONTAINS', 'CONTINUE', 'CONVERSION', 'CONVERT',
                'COPY', 'CORRESPONTING', 'COUNT', 'CREATE', 'CREATEDB', 'CREATEUSER',
                'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
                'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
                'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
                'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
                'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
                'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
                'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
                'DETERMINISTIC', 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH',
                'DISTINCT', 'DO', 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION',
                'DYNAMIC_FUNCTION_CODE', 'EACH', 'ELSE', 'ELSIF', 'ENCODING',
                'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', 'EXCEPTION',
                'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
                'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL',
                'FIRST', 'FOR', 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE',
                'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET',
                'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', 'GROUP', 'GROUPING',
                'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF',
                'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMEDIATELY', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT',
                'IN', 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX',
                'INHERITS', 'INITIALIZE', 'INITIALLY', 'INNER', 'INOUT', 'INPUT',
                'INSENSITIVE', 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INTERSECT', 'INTO',
                'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', 'KEY',
                'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
                'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT',
                'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION',
                'LOCATOR', 'LOCK', 'LOWER', 'MAP', 'MATCH', 'MAX', 'MAXVALUE',
                'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', 'MESSAGE_TEXT', 'METHOD', 'MIN',
                'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MONTH',
                'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB',
                'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT',
                'NOTHING', 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT',
                'OCTET_LENGTH', 'OF', 'OFF', 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY',
                'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', 'OR', 'ORDER',
                'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY',
                'OVERRIDING', 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE',
                'PARAMATER_NAME', 'PARAMATER_ORDINAL_POSITION',
                'PARAMETER_SPECIFIC_CATALOG', 'PARAMETER_SPECIFIC_NAME',
                'PARAMATER_SPECIFIC_SCHEMA', 'PARTIAL', 'PASCAL', 'PENDANT', 'PERIOD', 'PLACING',
                'PLI', 'POSITION', 'POSTFIX', 'PRECEEDS', 'PRECISION', 'PREFIX', 'PREORDER',
                'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
                'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF',
                'REFERENCES', 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME',
                'REPEATABLE', 'REPLACE', 'RESET', 'RESTART', 'RESTRICT', 'RESULT',
                'RETURN', 'RETURNED_LENGTH', 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE',
                'RETURNS', 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE',
                'ROUTINE_CATALOG', 'ROUTINE_NAME', 'ROUTINE_SCHEMA', 'ROW', 'ROWS',
                'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', 'SCHEMA_NAME',
                'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
                'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER',
                'SET', 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE',
                'SOME', 'SOURCE', 'SPACE', 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME',
                'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG',
                'STABLE', 'START', 'STATE', 'STATEMENT', 'STATIC', 'STATISTICS', 'STDIN',
                'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', 'SUBCLASS_ORIGIN',
                'SUBLIST', 'SUBSTRING', 'SUCCEEDS', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
                # BUG FIX: 'TEMP' previously had a stray leading space
                # (' TEMP'), which corrupted the generated alternation so
                # the bare keyword TEMP was never matched.
                'SYSTEM_USER', 'TABLE', 'TABLE_NAME', 'TEMP', 'TEMPLATE', 'TEMPORARY',
                'TERMINATE', 'THAN', 'THEN', 'TIME', 'TIMESTAMP', 'TIMEZONE_HOUR',
                'TIMEZONE_MINUTE', 'TO', 'TOAST', 'TRAILING', 'TRANSATION',
                'TRANSACTIONS_COMMITTED', 'TRANSACTIONS_ROLLED_BACK', 'TRANSATION_ACTIVE',
                'TRANSFORM', 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER',
                'TRIGGER_CATALOG', 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE',
                'TRUNCATE', 'TRUSTED', 'TYPE', 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED',
                'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', 'UNNAMED', 'UNNEST', 'UNTIL',
                'UPDATE', 'UPPER', 'USAGE', 'USER', 'USER_DEFINED_TYPE_CATALOG',
                'USER_DEFINED_TYPE_NAME', 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM',
                'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE',
                'VERSION', 'VERSIONS', 'VERSIONING', 'VIEW',
                'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
                'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
             Keyword),
            (words((
                'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
                'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
                'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
                'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
             Name.Builtin),
            (r'[+*/<>=~!@#%^&|`?-]', Operator),
            (r'[0-9]+', Number.Integer),
            # TODO: Backslash escapes?
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Symbol),  # not a real string literal in ANSI SQL
            (r'[a-z_][\w$]*', Name),  # allow $s in strings for Oracle
            (r'[;:()\[\],.]', Punctuation)
        ],
        'multiline-comments': [
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[^/*]+', Comment.Multiline),
            (r'[/*]', Comment.Multiline)
        ]
    }

    def analyse_text(text):
        # Any text could be SQL; claim only a token probability.
        return 0.01
502
+
503
+
504
class TransactSqlLexer(RegexLexer):
    """
    Transact-SQL (T-SQL) is Microsoft's and Sybase's proprietary extension to
    SQL.

    The list of keywords includes ODBC and keywords reserved for future use..
    """

    name = 'Transact-SQL'
    aliases = ['tsql', 't-sql']
    filenames = ['*.sql']
    mimetypes = ['text/x-tsql']

    # Use re.UNICODE to allow non ASCII letters in names.
    flags = re.IGNORECASE | re.UNICODE
    tokens = {
        'root': [
            (r'\s+', Whitespace),
            (r'--.*?$\n?', Comment.Single),
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (words(_tsql_builtins.OPERATORS), Operator),
            (words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
            (words(_tsql_builtins.TYPES, suffix=r'\b'), Name.Class),
            (words(_tsql_builtins.FUNCTIONS, suffix=r'\b'), Name.Function),
            (r'(goto)(\s+)(\w+\b)', bygroups(Keyword, Whitespace, Name.Label)),
            (words(_tsql_builtins.KEYWORDS, suffix=r'\b'), Keyword),
            # Bracket-quoted identifiers: [name]
            (r'(\[)([^]]+)(\])', bygroups(Operator, Name, Operator)),
            (r'0x[0-9a-f]+', Number.Hex),
            # Float variant 1, for example: 1., 1.e2, 1.2e3
            (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),
            # Float variant 2, for example: .1, .1e2
            (r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
            # Float variant 3, for example: 123e45
            (r'[0-9]+e[+-]?[0-9]+', Number.Float),
            (r'[0-9]+', Number.Integer),
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Symbol),
            (r'[;(),.]', Punctuation),
            # Below we use \w even for the first "real" character because
            # tokens starting with a digit have already been recognized
            # as Number above.
            (r'@@\w+', Name.Builtin),
            (r'@\w+', Name.Variable),
            (r'(\w+)(:)', bygroups(Name.Label, Punctuation)),
            (r'#?#?\w+', Name),  # names for temp tables and anything else
            (r'\?', Name.Variable.Magic),  # parameter for prepared statements
        ],
        'multiline-comments': [
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[^/*]+', Comment.Multiline),
            (r'[/*]', Comment.Multiline)
        ]
    }

    def analyse_text(text):
        # Heuristic scoring: DECLARE @var is unmistakably T-SQL; otherwise
        # weigh [bracketed] identifiers against `backticked` (MySQL) ones.
        rating = 0
        if tsql_declare_re.search(text):
            # Found T-SQL variable declaration.
            rating = 1.0
        else:
            name_between_backtick_count = len(
                name_between_backtick_re.findall(text))
            name_between_bracket_count = len(
                name_between_bracket_re.findall(text))
            # We need to check if there are any names using
            # backticks or brackets, as otherwise both are 0
            # and 0 >= 2 * 0, so we would always assume it's true
            dialect_name_count = name_between_backtick_count + name_between_bracket_count
            if dialect_name_count >= 1 and \
               name_between_bracket_count >= 2 * name_between_backtick_count:
                # Found at least twice as many [name] as `name`.
                rating += 0.5
            elif name_between_bracket_count > name_between_backtick_count:
                rating += 0.2
            elif name_between_bracket_count > 0:
                rating += 0.1
        if tsql_variable_re.search(text) is not None:
            rating += 0.1
        if tsql_go_re.search(text) is not None:
            rating += 0.1
        return rating
586
+
587
+
588
class MySqlLexer(RegexLexer):
    """The Oracle MySQL lexer.

    This lexer does not attempt to maintain strict compatibility with
    MariaDB syntax or keywords. Although MySQL and MariaDB's common code
    history suggests there may be significant overlap between the two,
    compatibility between the two is not a target for this lexer.
    """

    name = 'MySQL'
    aliases = ['mysql']
    mimetypes = ['text/x-mysql']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),

            # Comments
            (r'(?:#|--\s+).*', Comment.Single),
            (r'/\*\+', Comment.Special, 'optimizer-hints'),
            (r'/\*', Comment.Multiline, 'multiline-comment'),

            # Hexadecimal literals
            (r"x'([0-9a-f]{2})+'", Number.Hex),  # MySQL requires paired hex characters in this form.
            (r'0x[0-9a-f]+', Number.Hex),

            # Binary literals
            (r"b'[01]+'", Number.Bin),
            (r'0b[01]+', Number.Bin),

            # Numeric literals
            (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),  # Mandatory integer, optional fraction and exponent
            (r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),  # Mandatory fraction, optional integer and exponent
            (r'[0-9]+e[+-]?[0-9]+', Number.Float),  # Exponents with integer significands are still floats
            (r'[0-9]+(?=[^0-9a-z$_\u0080-\uffff])', Number.Integer),  # Integers that are not in a schema object name

            # Date literals
            (r"\{\s*d\s*(?P<quote>['\"])\s*\d{2}(\d{2})?.?\d{2}.?\d{2}\s*(?P=quote)\s*\}",
             Literal.Date),

            # Time literals
            (r"\{\s*t\s*(?P<quote>['\"])\s*(?:\d+\s+)?\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?\s*(?P=quote)\s*\}",
             Literal.Date),

            # Timestamp literals
            (
                r"\{\s*ts\s*(?P<quote>['\"])\s*"
                r"\d{2}(?:\d{2})?.?\d{2}.?\d{2}"  # Date part
                r"\s+"  # Whitespace between date and time
                r"\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?"  # Time part
                r"\s*(?P=quote)\s*\}",
                Literal.Date
            ),

            # String literals
            (r"'", String.Single, 'single-quoted-string'),
            (r'"', String.Double, 'double-quoted-string'),

            # Variables
            (r'@@(?:global\.|persist\.|persist_only\.|session\.)?[a-z_]+', Name.Variable),
            (r'@[a-z0-9_$.]+', Name.Variable),
            (r"@'", Name.Variable, 'single-quoted-variable'),
            (r'@"', Name.Variable, 'double-quoted-variable'),
            (r"@`", Name.Variable, 'backtick-quoted-variable'),
            (r'\?', Name.Variable),  # For demonstrating prepared statements

            # Operators
            (r'[!%&*+/:<=>^|~-]+', Operator),

            # Exceptions; these words tokenize differently in different contexts.
            (r'\b(set)(?!\s*\()', Keyword),
            (r'\b(character)(\s+)(set)\b', bygroups(Keyword, Text, Keyword)),
            # In all other known cases, "SET" is tokenized by MYSQL_DATATYPES.

            (words(MYSQL_CONSTANTS, prefix=r'\b', suffix=r'\b'), Name.Constant),
            (words(MYSQL_DATATYPES, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(MYSQL_KEYWORDS, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(MYSQL_FUNCTIONS, prefix=r'\b', suffix=r'\b(\s*)(\()'),
             bygroups(Name.Function, Text, Punctuation)),

            # Schema object names
            #
            # Note: Although the first regex supports unquoted all-numeric
            # identifiers, this will not be a problem in practice because
            # numeric literals have already been handled above.
            #
            ('[0-9a-z$_\u0080-\uffff]+', Name),
            (r'`', Name.Quoted, 'schema-object-name'),

            # Punctuation
            (r'[(),.;]', Punctuation),
        ],

        # Multiline comment substates
        # ---------------------------

        'optimizer-hints': [
            (r'[^*a-z]+', Comment.Special),
            (r'\*/', Comment.Special, '#pop'),
            (words(MYSQL_OPTIMIZER_HINTS, suffix=r'\b'), Comment.Preproc),
            ('[a-z]+', Comment.Special),
            (r'\*', Comment.Special),
        ],

        'multiline-comment': [
            (r'[^*]+', Comment.Multiline),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'\*', Comment.Multiline),
        ],

        # String substates
        # ----------------

        'single-quoted-string': [
            (r"[^'\\]+", String.Single),
            (r"''", String.Escape),
            (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
            (r"'", String.Single, '#pop'),
        ],

        'double-quoted-string': [
            (r'[^"\\]+', String.Double),
            (r'""', String.Escape),
            (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
            (r'"', String.Double, '#pop'),
        ],

        # Variable substates
        # ------------------

        'single-quoted-variable': [
            (r"[^']+", Name.Variable),
            (r"''", Name.Variable),
            (r"'", Name.Variable, '#pop'),
        ],

        'double-quoted-variable': [
            (r'[^"]+', Name.Variable),
            (r'""', Name.Variable),
            (r'"', Name.Variable, '#pop'),
        ],

        'backtick-quoted-variable': [
            (r'[^`]+', Name.Variable),
            (r'``', Name.Variable),
            (r'`', Name.Variable, '#pop'),
        ],

        # Schema object name substates
        # ----------------------------
        #
        # "Name.Quoted" and "Name.Quoted.Escape" are non-standard but
        # formatters will style them as "Name" by default but add
        # additional styles based on the token name. This gives users
        # flexibility to add custom styles as desired.
        #
        'schema-object-name': [
            (r'[^`]+', Name.Quoted),
            (r'``', Name.Quoted.Escape),
            (r'`', Name.Quoted, '#pop'),
        ],
    }

    def analyse_text(text):
        # Heuristic: MySQL quotes identifiers with backticks, T-SQL with
        # brackets; score by which style dominates.
        rating = 0
        name_between_backtick_count = len(
            name_between_backtick_re.findall(text))
        name_between_bracket_count = len(
            name_between_bracket_re.findall(text))
        # Same logic as above in the TSQL analysis
        dialect_name_count = name_between_backtick_count + name_between_bracket_count
        if dialect_name_count >= 1 and \
           name_between_backtick_count >= 2 * name_between_bracket_count:
            # Found at least twice as many `name` as [name].
            rating += 0.5
        elif name_between_backtick_count > name_between_bracket_count:
            rating += 0.2
        elif name_between_backtick_count > 0:
            rating += 0.1
        return rating
769
+
770
+
771
class SqliteConsoleLexer(Lexer):
    """
    Lexer for example sessions using sqlite3.

    .. versionadded:: 0.11
    """

    name = 'sqlite3con'
    aliases = ['sqlite3']
    filenames = ['*.sqlite3-console']
    mimetypes = ['text/x-sqlite3-console']

    def get_tokens_unprocessed(self, data):
        """Tokenize a sqlite3 interactive transcript.

        Lines beginning with one of the 8-character prompts
        ``'sqlite> '`` or ``'   ...> '`` are buffered and highlighted
        as SQL, with the prompt emitted as ``Generic.Prompt`` via
        ``do_insertions``.  Lines starting with ``'SQL error: '`` are
        yielded as ``Generic.Traceback``; any other line is plain
        ``Generic.Output``.
        """
        sql = SqlLexer(**self.options)

        curcode = ''
        insertions = []
        for match in line_re.finditer(data):
            line = match.group()
            # Both prompts are exactly 8 characters wide, so the fixed
            # line[:8] / line[8:] split below is consistent for either.
            # (The continuation prompt is '   ...> ' with three leading
            # spaces, as in upstream Pygments.)
            if line.startswith('sqlite> ') or line.startswith('   ...> '):
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:8])]))
                curcode += line[8:]
            else:
                # A non-prompt line terminates any buffered SQL block.
                if curcode:
                    yield from do_insertions(insertions,
                                             sql.get_tokens_unprocessed(curcode))
                    curcode = ''
                    insertions = []
                if line.startswith('SQL error: '):
                    yield (match.start(), Generic.Traceback, line)
                else:
                    yield (match.start(), Generic.Output, line)
        # Flush SQL still buffered when the input ends on a prompt line.
        if curcode:
            yield from do_insertions(insertions,
                                     sql.get_tokens_unprocessed(curcode))
807
+
808
+
809
class RqlLexer(RegexLexer):
    """
    Lexer for Relation Query Language.

    `RQL <http://www.logilab.org/project/rql>`_

    .. versionadded:: 2.0
    """
    name = 'RQL'
    aliases = ['rql']
    filenames = ['*.rql']
    mimetypes = ['text/x-rql']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            # Whitespace is insignificant.
            (r'\s+', Text),
            # Statement and clause keywords.
            (r'(DELETE|SET|INSERT|UNION|DISTINCT|WITH|WHERE|BEING|OR|AND|'
             r'NOT|GROUPBY|HAVING|ORDERBY|ASC|DESC|LIMIT|OFFSET|TODAY|'
             r'NOW|TRUE|FALSE|NULL|EXISTS)\b', Keyword),
            (r'[+*/<>=%-]', Operator),
            # Built-in entity/relation names.
            (r'(Any|is|instance_of|CWEType|CWRelation)\b', Name.Builtin),
            (r'[0-9]+', Number.Integer),
            # Variables: uppercase-initial identifiers, optional '?'.
            (r'[A-Z_]\w*\??', Name),
            # Quoted strings; doubled quotes act as escapes.
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Single),
            (r'[;:()\[\],.]', Punctuation),
        ],
    }