pygments.rb 1.2.1 → 2.0.0.rc2

Files changed (378)
  1. checksums.yaml +5 -5
  2. data/.github/dependabot.yml +13 -0
  3. data/.github/workflows/ci.yml +28 -0
  4. data/.github/workflows/release.yml +24 -0
  5. data/.gitignore +5 -6
  6. data/CHANGELOG.adoc +119 -0
  7. data/Gemfile +3 -1
  8. data/LICENSE +1 -1
  9. data/README.adoc +161 -0
  10. data/Rakefile +10 -21
  11. data/bench.rb +8 -7
  12. data/cache-lexers.rb +3 -2
  13. data/lib/pygments.rb +10 -11
  14. data/lib/pygments/lexer.rb +5 -5
  15. data/lib/pygments/mentos.py +23 -66
  16. data/lib/pygments/popen.rb +152 -204
  17. data/lib/pygments/version.rb +2 -2
  18. data/pygments.rb.gemspec +11 -9
  19. data/test/test_pygments.rb +51 -84
  20. data/vendor/pygments-main/{AUTHORS → Pygments-2.7.3.dist-info/AUTHORS} +21 -3
  21. data/vendor/pygments-main/Pygments-2.7.3.dist-info/INSTALLER +1 -0
  22. data/vendor/pygments-main/{LICENSE → Pygments-2.7.3.dist-info/LICENSE} +1 -1
  23. data/vendor/pygments-main/Pygments-2.7.3.dist-info/METADATA +49 -0
  24. data/vendor/pygments-main/Pygments-2.7.3.dist-info/RECORD +482 -0
  25. data/vendor/pygments-main/Pygments-2.7.3.dist-info/REQUESTED +0 -0
  26. data/vendor/pygments-main/Pygments-2.7.3.dist-info/WHEEL +5 -0
  27. data/vendor/pygments-main/Pygments-2.7.3.dist-info/entry_points.txt +3 -0
  28. data/vendor/pygments-main/Pygments-2.7.3.dist-info/top_level.txt +1 -0
  29. data/vendor/pygments-main/bin/pygmentize +8 -0
  30. data/vendor/pygments-main/pygments/__init__.py +6 -11
  31. data/vendor/pygments-main/pygments/__main__.py +18 -0
  32. data/vendor/pygments-main/pygments/cmdline.py +38 -29
  33. data/vendor/pygments-main/pygments/console.py +6 -9
  34. data/vendor/pygments-main/pygments/filter.py +4 -6
  35. data/vendor/pygments-main/pygments/filters/__init__.py +609 -21
  36. data/vendor/pygments-main/pygments/formatter.py +4 -4
  37. data/vendor/pygments-main/pygments/formatters/__init__.py +9 -8
  38. data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -3
  39. data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -1
  40. data/vendor/pygments-main/pygments/formatters/html.py +223 -135
  41. data/vendor/pygments-main/pygments/formatters/img.py +68 -41
  42. data/vendor/pygments-main/pygments/formatters/irc.py +39 -39
  43. data/vendor/pygments-main/pygments/formatters/latex.py +56 -26
  44. data/vendor/pygments-main/pygments/formatters/other.py +12 -8
  45. data/vendor/pygments-main/pygments/formatters/rtf.py +29 -29
  46. data/vendor/pygments-main/pygments/formatters/svg.py +38 -4
  47. data/vendor/pygments-main/pygments/formatters/terminal.py +25 -31
  48. data/vendor/pygments-main/pygments/formatters/terminal256.py +22 -12
  49. data/vendor/pygments-main/pygments/lexer.py +41 -39
  50. data/vendor/pygments-main/pygments/lexers/__init__.py +342 -0
  51. data/vendor/pygments-main/pygments/lexers/_asy_builtins.py +1645 -0
  52. data/vendor/pygments-main/pygments/lexers/_cl_builtins.py +232 -0
  53. data/vendor/pygments-main/pygments/lexers/_cocoa_builtins.py +71 -0
  54. data/vendor/pygments-main/pygments/lexers/_csound_builtins.py +1725 -0
  55. data/vendor/pygments-main/pygments/lexers/_lasso_builtins.py +5327 -0
  56. data/vendor/pygments-main/pygments/lexers/_lua_builtins.py +293 -0
  57. data/vendor/pygments-main/pygments/lexers/_mapping.py +551 -0
  58. data/vendor/pygments-main/pygments/lexers/_mql_builtins.py +1172 -0
  59. data/vendor/pygments-main/pygments/lexers/_mysql_builtins.py +1282 -0
  60. data/vendor/pygments-main/pygments/lexers/_openedge_builtins.py +2547 -0
  61. data/vendor/pygments-main/pygments/lexers/_php_builtins.py +4753 -0
  62. data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +678 -0
  63. data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +3094 -0
  64. data/vendor/pygments-main/pygments/lexers/_sourcemod_builtins.py +1161 -0
  65. data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +558 -0
  66. data/vendor/pygments-main/pygments/lexers/_stata_builtins.py +421 -0
  67. data/vendor/pygments-main/pygments/lexers/_tsql_builtins.py +1004 -0
  68. data/vendor/pygments-main/pygments/lexers/_usd_builtins.py +113 -0
  69. data/vendor/pygments-main/pygments/lexers/_vbscript_builtins.py +280 -0
  70. data/vendor/pygments-main/pygments/lexers/_vim_builtins.py +1939 -0
  71. data/vendor/pygments-main/pygments/lexers/actionscript.py +245 -0
  72. data/vendor/pygments-main/pygments/lexers/agile.py +24 -0
  73. data/vendor/pygments-main/pygments/lexers/algebra.py +240 -0
  74. data/vendor/pygments-main/pygments/lexers/ambient.py +76 -0
  75. data/vendor/pygments-main/pygments/lexers/ampl.py +87 -0
  76. data/vendor/pygments-main/pygments/lexers/apl.py +101 -0
  77. data/vendor/pygments-main/pygments/lexers/archetype.py +318 -0
  78. data/vendor/pygments-main/pygments/lexers/arrow.py +117 -0
  79. data/vendor/pygments-main/pygments/lexers/asm.py +1005 -0
  80. data/vendor/pygments-main/pygments/lexers/automation.py +374 -0
  81. data/vendor/pygments-main/pygments/lexers/bare.py +104 -0
  82. data/vendor/pygments-main/pygments/lexers/basic.py +662 -0
  83. data/vendor/pygments-main/pygments/lexers/bibtex.py +160 -0
  84. data/vendor/pygments-main/pygments/lexers/boa.py +102 -0
  85. data/vendor/pygments-main/pygments/lexers/business.py +627 -0
  86. data/vendor/pygments-main/pygments/lexers/c_cpp.py +344 -0
  87. data/vendor/pygments-main/pygments/lexers/c_like.py +566 -0
  88. data/vendor/pygments-main/pygments/lexers/capnproto.py +78 -0
  89. data/vendor/pygments-main/pygments/lexers/chapel.py +112 -0
  90. data/vendor/pygments-main/pygments/lexers/clean.py +179 -0
  91. data/vendor/pygments-main/pygments/lexers/compiled.py +34 -0
  92. data/vendor/pygments-main/pygments/lexers/configs.py +984 -0
  93. data/vendor/pygments-main/pygments/lexers/console.py +114 -0
  94. data/vendor/pygments-main/pygments/lexers/crystal.py +392 -0
  95. data/vendor/pygments-main/pygments/lexers/csound.py +467 -0
  96. data/vendor/pygments-main/pygments/lexers/css.py +691 -0
  97. data/vendor/pygments-main/pygments/lexers/d.py +256 -0
  98. data/vendor/pygments-main/pygments/lexers/dalvik.py +125 -0
  99. data/vendor/pygments-main/pygments/lexers/data.py +698 -0
  100. data/vendor/pygments-main/pygments/lexers/devicetree.py +109 -0
  101. data/vendor/pygments-main/pygments/lexers/diff.py +165 -0
  102. data/vendor/pygments-main/pygments/lexers/dotnet.py +707 -0
  103. data/vendor/pygments-main/pygments/lexers/dsls.py +960 -0
  104. data/vendor/pygments-main/pygments/lexers/dylan.py +287 -0
  105. data/vendor/pygments-main/pygments/lexers/ecl.py +139 -0
  106. data/vendor/pygments-main/pygments/lexers/eiffel.py +65 -0
  107. data/vendor/pygments-main/pygments/lexers/elm.py +121 -0
  108. data/vendor/pygments-main/pygments/lexers/email.py +151 -0
  109. data/vendor/pygments-main/pygments/lexers/erlang.py +530 -0
  110. data/vendor/pygments-main/pygments/lexers/esoteric.py +304 -0
  111. data/vendor/pygments-main/pygments/lexers/ezhil.py +77 -0
  112. data/vendor/pygments-main/pygments/lexers/factor.py +344 -0
  113. data/vendor/pygments-main/pygments/lexers/fantom.py +250 -0
  114. data/vendor/pygments-main/pygments/lexers/felix.py +273 -0
  115. data/vendor/pygments-main/pygments/lexers/floscript.py +83 -0
  116. data/vendor/pygments-main/pygments/lexers/forth.py +178 -0
  117. data/vendor/pygments-main/pygments/lexers/fortran.py +206 -0
  118. data/vendor/pygments-main/pygments/lexers/foxpro.py +428 -0
  119. data/vendor/pygments-main/pygments/lexers/freefem.py +898 -0
  120. data/vendor/pygments-main/pygments/lexers/functional.py +21 -0
  121. data/vendor/pygments-main/pygments/lexers/gdscript.py +346 -0
  122. data/vendor/pygments-main/pygments/lexers/go.py +101 -0
  123. data/vendor/pygments-main/pygments/lexers/grammar_notation.py +270 -0
  124. data/vendor/pygments-main/pygments/lexers/graph.py +85 -0
  125. data/vendor/pygments-main/pygments/lexers/graphics.py +800 -0
  126. data/vendor/pygments-main/pygments/lexers/haskell.py +870 -0
  127. data/vendor/pygments-main/pygments/lexers/haxe.py +936 -0
  128. data/vendor/pygments-main/pygments/lexers/hdl.py +472 -0
  129. data/vendor/pygments-main/pygments/lexers/hexdump.py +103 -0
  130. data/vendor/pygments-main/pygments/lexers/html.py +614 -0
  131. data/vendor/pygments-main/pygments/lexers/idl.py +281 -0
  132. data/vendor/pygments-main/pygments/lexers/igor.py +420 -0
  133. data/vendor/pygments-main/pygments/lexers/inferno.py +96 -0
  134. data/vendor/pygments-main/pygments/lexers/installers.py +322 -0
  135. data/vendor/pygments-main/pygments/lexers/int_fiction.py +1368 -0
  136. data/vendor/pygments-main/pygments/lexers/iolang.py +63 -0
  137. data/vendor/pygments-main/pygments/lexers/j.py +146 -0
  138. data/vendor/pygments-main/pygments/lexers/javascript.py +1540 -0
  139. data/vendor/pygments-main/pygments/lexers/julia.py +331 -0
  140. data/vendor/pygments-main/pygments/lexers/jvm.py +1673 -0
  141. data/vendor/pygments-main/pygments/lexers/lisp.py +2699 -0
  142. data/vendor/pygments-main/pygments/lexers/make.py +206 -0
  143. data/vendor/pygments-main/pygments/lexers/markup.py +765 -0
  144. data/vendor/pygments-main/pygments/lexers/math.py +21 -0
  145. data/vendor/pygments-main/pygments/lexers/matlab.py +720 -0
  146. data/vendor/pygments-main/pygments/lexers/mime.py +226 -0
  147. data/vendor/pygments-main/pygments/lexers/ml.py +958 -0
  148. data/vendor/pygments-main/pygments/lexers/modeling.py +366 -0
  149. data/vendor/pygments-main/pygments/lexers/modula2.py +1580 -0
  150. data/vendor/pygments-main/pygments/lexers/monte.py +204 -0
  151. data/vendor/pygments-main/pygments/lexers/mosel.py +448 -0
  152. data/vendor/pygments-main/pygments/lexers/ncl.py +894 -0
  153. data/vendor/pygments-main/pygments/lexers/nimrod.py +159 -0
  154. data/vendor/pygments-main/pygments/lexers/nit.py +64 -0
  155. data/vendor/pygments-main/pygments/lexers/nix.py +136 -0
  156. data/vendor/pygments-main/pygments/lexers/oberon.py +121 -0
  157. data/vendor/pygments-main/pygments/lexers/objective.py +504 -0
  158. data/vendor/pygments-main/pygments/lexers/ooc.py +85 -0
  159. data/vendor/pygments-main/pygments/lexers/other.py +41 -0
  160. data/vendor/pygments-main/pygments/lexers/parasail.py +79 -0
  161. data/vendor/pygments-main/pygments/lexers/parsers.py +800 -0
  162. data/vendor/pygments-main/pygments/lexers/pascal.py +644 -0
  163. data/vendor/pygments-main/pygments/lexers/pawn.py +205 -0
  164. data/vendor/pygments-main/pygments/lexers/perl.py +732 -0
  165. data/vendor/pygments-main/pygments/lexers/php.py +321 -0
  166. data/vendor/pygments-main/pygments/lexers/pointless.py +71 -0
  167. data/vendor/pygments-main/pygments/lexers/pony.py +94 -0
  168. data/vendor/pygments-main/pygments/lexers/praat.py +302 -0
  169. data/vendor/pygments-main/pygments/lexers/prolog.py +306 -0
  170. data/vendor/pygments-main/pygments/lexers/promql.py +183 -0
  171. data/vendor/pygments-main/pygments/lexers/python.py +1151 -0
  172. data/vendor/pygments-main/pygments/lexers/qvt.py +152 -0
  173. data/vendor/pygments-main/pygments/lexers/r.py +191 -0
  174. data/vendor/pygments-main/pygments/lexers/rdf.py +463 -0
  175. data/vendor/pygments-main/pygments/lexers/rebol.py +431 -0
  176. data/vendor/pygments-main/pygments/lexers/resource.py +85 -0
  177. data/vendor/pygments-main/pygments/lexers/ride.py +139 -0
  178. data/vendor/pygments-main/pygments/lexers/rnc.py +67 -0
  179. data/vendor/pygments-main/pygments/lexers/roboconf.py +82 -0
  180. data/vendor/pygments-main/pygments/lexers/robotframework.py +552 -0
  181. data/vendor/pygments-main/pygments/lexers/ruby.py +517 -0
  182. data/vendor/pygments-main/pygments/lexers/rust.py +224 -0
  183. data/vendor/pygments-main/pygments/lexers/sas.py +228 -0
  184. data/vendor/pygments-main/pygments/lexers/scdoc.py +83 -0
  185. data/vendor/pygments-main/pygments/lexers/scripting.py +1284 -0
  186. data/vendor/pygments-main/pygments/lexers/sgf.py +61 -0
  187. data/vendor/pygments-main/pygments/lexers/shell.py +914 -0
  188. data/vendor/pygments-main/pygments/lexers/sieve.py +69 -0
  189. data/vendor/pygments-main/pygments/lexers/slash.py +185 -0
  190. data/vendor/pygments-main/pygments/lexers/smalltalk.py +195 -0
  191. data/vendor/pygments-main/pygments/lexers/smv.py +79 -0
  192. data/vendor/pygments-main/pygments/lexers/snobol.py +83 -0
  193. data/vendor/pygments-main/pygments/lexers/solidity.py +92 -0
  194. data/vendor/pygments-main/pygments/lexers/special.py +105 -0
  195. data/vendor/pygments-main/pygments/lexers/sql.py +837 -0
  196. data/vendor/pygments-main/pygments/lexers/stata.py +171 -0
  197. data/vendor/pygments-main/pygments/lexers/supercollider.py +95 -0
  198. data/vendor/pygments-main/pygments/lexers/tcl.py +145 -0
  199. data/vendor/pygments-main/pygments/lexers/templates.py +2264 -0
  200. data/vendor/pygments-main/pygments/lexers/teraterm.py +335 -0
  201. data/vendor/pygments-main/pygments/lexers/testing.py +207 -0
  202. data/vendor/pygments-main/pygments/lexers/text.py +26 -0
  203. data/vendor/pygments-main/pygments/lexers/textedit.py +169 -0
  204. data/vendor/pygments-main/pygments/lexers/textfmts.py +430 -0
  205. data/vendor/pygments-main/pygments/lexers/theorem.py +474 -0
  206. data/vendor/pygments-main/pygments/lexers/tnt.py +263 -0
  207. data/vendor/pygments-main/pygments/lexers/trafficscript.py +54 -0
  208. data/vendor/pygments-main/pygments/lexers/typoscript.py +219 -0
  209. data/vendor/pygments-main/pygments/lexers/unicon.py +412 -0
  210. data/vendor/pygments-main/pygments/lexers/urbi.py +146 -0
  211. data/vendor/pygments-main/pygments/lexers/usd.py +90 -0
  212. data/vendor/pygments-main/pygments/lexers/varnish.py +190 -0
  213. data/vendor/pygments-main/pygments/lexers/verification.py +114 -0
  214. data/vendor/pygments-main/pygments/lexers/web.py +24 -0
  215. data/vendor/pygments-main/pygments/lexers/webidl.py +299 -0
  216. data/vendor/pygments-main/pygments/lexers/webmisc.py +991 -0
  217. data/vendor/pygments-main/pygments/lexers/whiley.py +116 -0
  218. data/vendor/pygments-main/pygments/lexers/x10.py +69 -0
  219. data/vendor/pygments-main/pygments/lexers/xorg.py +37 -0
  220. data/vendor/pygments-main/pygments/lexers/yang.py +104 -0
  221. data/vendor/pygments-main/pygments/lexers/zig.py +124 -0
  222. data/vendor/pygments-main/pygments/modeline.py +1 -1
  223. data/vendor/pygments-main/pygments/plugin.py +4 -2
  224. data/vendor/pygments-main/pygments/regexopt.py +1 -1
  225. data/vendor/pygments-main/pygments/scanner.py +2 -2
  226. data/vendor/pygments-main/pygments/sphinxext.py +2 -4
  227. data/vendor/pygments-main/pygments/style.py +61 -24
  228. data/vendor/pygments-main/pygments/styles/__init__.py +10 -4
  229. data/vendor/pygments-main/pygments/styles/abap.py +1 -1
  230. data/vendor/pygments-main/pygments/styles/algol.py +1 -1
  231. data/vendor/pygments-main/pygments/styles/algol_nu.py +1 -1
  232. data/vendor/pygments-main/pygments/styles/arduino.py +2 -2
  233. data/vendor/pygments-main/pygments/styles/autumn.py +1 -1
  234. data/vendor/pygments-main/pygments/styles/borland.py +1 -1
  235. data/vendor/pygments-main/pygments/styles/bw.py +1 -1
  236. data/vendor/pygments-main/pygments/styles/colorful.py +1 -1
  237. data/vendor/pygments-main/pygments/styles/default.py +1 -1
  238. data/vendor/pygments-main/pygments/styles/emacs.py +1 -1
  239. data/vendor/pygments-main/pygments/styles/friendly.py +1 -1
  240. data/vendor/pygments-main/pygments/styles/fruity.py +1 -1
  241. data/vendor/pygments-main/pygments/styles/igor.py +1 -1
  242. data/vendor/pygments-main/pygments/styles/inkpot.py +67 -0
  243. data/vendor/pygments-main/pygments/styles/lovelace.py +1 -1
  244. data/vendor/pygments-main/pygments/styles/manni.py +1 -1
  245. data/vendor/pygments-main/pygments/styles/monokai.py +4 -3
  246. data/vendor/pygments-main/pygments/styles/murphy.py +1 -1
  247. data/vendor/pygments-main/pygments/styles/native.py +1 -1
  248. data/vendor/pygments-main/pygments/styles/paraiso_dark.py +1 -1
  249. data/vendor/pygments-main/pygments/styles/paraiso_light.py +1 -1
  250. data/vendor/pygments-main/pygments/styles/pastie.py +1 -1
  251. data/vendor/pygments-main/pygments/styles/perldoc.py +1 -1
  252. data/vendor/pygments-main/pygments/styles/rainbow_dash.py +1 -1
  253. data/vendor/pygments-main/pygments/styles/rrt.py +1 -1
  254. data/vendor/pygments-main/pygments/styles/sas.py +1 -1
  255. data/vendor/pygments-main/pygments/styles/solarized.py +134 -0
  256. data/vendor/pygments-main/pygments/styles/stata_dark.py +41 -0
  257. data/vendor/pygments-main/pygments/styles/{stata.py → stata_light.py} +14 -15
  258. data/vendor/pygments-main/pygments/styles/tango.py +1 -1
  259. data/vendor/pygments-main/pygments/styles/trac.py +1 -1
  260. data/vendor/pygments-main/pygments/styles/vim.py +1 -1
  261. data/vendor/pygments-main/pygments/styles/vs.py +1 -1
  262. data/vendor/pygments-main/pygments/styles/xcode.py +1 -1
  263. data/vendor/pygments-main/pygments/token.py +1 -1
  264. data/vendor/pygments-main/pygments/unistring.py +47 -108
  265. data/vendor/pygments-main/pygments/util.py +15 -92
  266. metadata +69 -136
  267. data/CHANGELOG.md +0 -111
  268. data/README.md +0 -121
  269. data/circle.yml +0 -20
  270. data/test/test_data.py +0 -514
  271. data/test/test_data_generated +0 -2582
  272. data/vendor/custom_lexers/github.py +0 -565
  273. data/vendor/pygments-main/CHANGES +0 -1186
  274. data/vendor/pygments-main/MANIFEST.in +0 -6
  275. data/vendor/pygments-main/Makefile +0 -65
  276. data/vendor/pygments-main/README.rst +0 -39
  277. data/vendor/pygments-main/REVISION +0 -1
  278. data/vendor/pygments-main/TODO +0 -12
  279. data/vendor/pygments-main/doc/Makefile +0 -153
  280. data/vendor/pygments-main/doc/_static/favicon.ico +0 -0
  281. data/vendor/pygments-main/doc/_static/logo_new.png +0 -0
  282. data/vendor/pygments-main/doc/_static/logo_only.png +0 -0
  283. data/vendor/pygments-main/doc/_templates/docssidebar.html +0 -3
  284. data/vendor/pygments-main/doc/_templates/indexsidebar.html +0 -25
  285. data/vendor/pygments-main/doc/_themes/pygments14/layout.html +0 -98
  286. data/vendor/pygments-main/doc/_themes/pygments14/static/bodybg.png +0 -0
  287. data/vendor/pygments-main/doc/_themes/pygments14/static/docbg.png +0 -0
  288. data/vendor/pygments-main/doc/_themes/pygments14/static/listitem.png +0 -0
  289. data/vendor/pygments-main/doc/_themes/pygments14/static/logo.png +0 -0
  290. data/vendor/pygments-main/doc/_themes/pygments14/static/pocoo.png +0 -0
  291. data/vendor/pygments-main/doc/_themes/pygments14/static/pygments14.css_t +0 -401
  292. data/vendor/pygments-main/doc/_themes/pygments14/theme.conf +0 -15
  293. data/vendor/pygments-main/doc/conf.py +0 -241
  294. data/vendor/pygments-main/doc/docs/api.rst +0 -354
  295. data/vendor/pygments-main/doc/docs/authors.rst +0 -4
  296. data/vendor/pygments-main/doc/docs/changelog.rst +0 -1
  297. data/vendor/pygments-main/doc/docs/cmdline.rst +0 -166
  298. data/vendor/pygments-main/doc/docs/filterdevelopment.rst +0 -71
  299. data/vendor/pygments-main/doc/docs/filters.rst +0 -41
  300. data/vendor/pygments-main/doc/docs/formatterdevelopment.rst +0 -169
  301. data/vendor/pygments-main/doc/docs/formatters.rst +0 -48
  302. data/vendor/pygments-main/doc/docs/index.rst +0 -66
  303. data/vendor/pygments-main/doc/docs/integrate.rst +0 -40
  304. data/vendor/pygments-main/doc/docs/java.rst +0 -70
  305. data/vendor/pygments-main/doc/docs/lexerdevelopment.rst +0 -728
  306. data/vendor/pygments-main/doc/docs/lexers.rst +0 -69
  307. data/vendor/pygments-main/doc/docs/moinmoin.rst +0 -39
  308. data/vendor/pygments-main/doc/docs/plugins.rst +0 -93
  309. data/vendor/pygments-main/doc/docs/quickstart.rst +0 -205
  310. data/vendor/pygments-main/doc/docs/rstdirective.rst +0 -22
  311. data/vendor/pygments-main/doc/docs/styles.rst +0 -201
  312. data/vendor/pygments-main/doc/docs/tokens.rst +0 -372
  313. data/vendor/pygments-main/doc/docs/unicode.rst +0 -58
  314. data/vendor/pygments-main/doc/download.rst +0 -41
  315. data/vendor/pygments-main/doc/faq.rst +0 -139
  316. data/vendor/pygments-main/doc/index.rst +0 -54
  317. data/vendor/pygments-main/doc/languages.rst +0 -154
  318. data/vendor/pygments-main/doc/make.bat +0 -190
  319. data/vendor/pygments-main/doc/pygmentize.1 +0 -94
  320. data/vendor/pygments-main/external/autopygmentize +0 -101
  321. data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +0 -162
  322. data/vendor/pygments-main/external/markdown-processor.py +0 -67
  323. data/vendor/pygments-main/external/moin-parser.py +0 -112
  324. data/vendor/pygments-main/external/pygments.bashcomp +0 -38
  325. data/vendor/pygments-main/external/rst-directive.py +0 -82
  326. data/vendor/pygments-main/pygmentize +0 -8
  327. data/vendor/pygments-main/requirements.txt +0 -5
  328. data/vendor/pygments-main/scripts/check_sources.py +0 -211
  329. data/vendor/pygments-main/scripts/debug_lexer.py +0 -246
  330. data/vendor/pygments-main/scripts/detect_missing_analyse_text.py +0 -33
  331. data/vendor/pygments-main/scripts/epydoc.css +0 -280
  332. data/vendor/pygments-main/scripts/get_vimkw.py +0 -74
  333. data/vendor/pygments-main/scripts/pylintrc +0 -301
  334. data/vendor/pygments-main/scripts/vim2pygments.py +0 -935
  335. data/vendor/pygments-main/setup.cfg +0 -10
  336. data/vendor/pygments-main/setup.py +0 -77
  337. data/vendor/pygments-main/tox.ini +0 -7
  338. data/vendor/simplejson/.gitignore +0 -10
  339. data/vendor/simplejson/.travis.yml +0 -5
  340. data/vendor/simplejson/CHANGES.txt +0 -291
  341. data/vendor/simplejson/LICENSE.txt +0 -19
  342. data/vendor/simplejson/MANIFEST.in +0 -5
  343. data/vendor/simplejson/README.rst +0 -19
  344. data/vendor/simplejson/conf.py +0 -179
  345. data/vendor/simplejson/index.rst +0 -628
  346. data/vendor/simplejson/scripts/make_docs.py +0 -18
  347. data/vendor/simplejson/setup.py +0 -104
  348. data/vendor/simplejson/simplejson/__init__.py +0 -510
  349. data/vendor/simplejson/simplejson/_speedups.c +0 -2745
  350. data/vendor/simplejson/simplejson/decoder.py +0 -425
  351. data/vendor/simplejson/simplejson/encoder.py +0 -567
  352. data/vendor/simplejson/simplejson/ordered_dict.py +0 -119
  353. data/vendor/simplejson/simplejson/scanner.py +0 -77
  354. data/vendor/simplejson/simplejson/tests/__init__.py +0 -67
  355. data/vendor/simplejson/simplejson/tests/test_bigint_as_string.py +0 -55
  356. data/vendor/simplejson/simplejson/tests/test_check_circular.py +0 -30
  357. data/vendor/simplejson/simplejson/tests/test_decimal.py +0 -66
  358. data/vendor/simplejson/simplejson/tests/test_decode.py +0 -83
  359. data/vendor/simplejson/simplejson/tests/test_default.py +0 -9
  360. data/vendor/simplejson/simplejson/tests/test_dump.py +0 -67
  361. data/vendor/simplejson/simplejson/tests/test_encode_basestring_ascii.py +0 -46
  362. data/vendor/simplejson/simplejson/tests/test_encode_for_html.py +0 -32
  363. data/vendor/simplejson/simplejson/tests/test_errors.py +0 -34
  364. data/vendor/simplejson/simplejson/tests/test_fail.py +0 -91
  365. data/vendor/simplejson/simplejson/tests/test_float.py +0 -19
  366. data/vendor/simplejson/simplejson/tests/test_indent.py +0 -86
  367. data/vendor/simplejson/simplejson/tests/test_item_sort_key.py +0 -20
  368. data/vendor/simplejson/simplejson/tests/test_namedtuple.py +0 -121
  369. data/vendor/simplejson/simplejson/tests/test_pass1.py +0 -76
  370. data/vendor/simplejson/simplejson/tests/test_pass2.py +0 -14
  371. data/vendor/simplejson/simplejson/tests/test_pass3.py +0 -20
  372. data/vendor/simplejson/simplejson/tests/test_recursion.py +0 -67
  373. data/vendor/simplejson/simplejson/tests/test_scanstring.py +0 -117
  374. data/vendor/simplejson/simplejson/tests/test_separators.py +0 -42
  375. data/vendor/simplejson/simplejson/tests/test_speedups.py +0 -20
  376. data/vendor/simplejson/simplejson/tests/test_tuple.py +0 -49
  377. data/vendor/simplejson/simplejson/tests/test_unicode.py +0 -109
  378. data/vendor/simplejson/simplejson/tool.py +0 -39
data/vendor/pygments-main/pygments/lexers/mime.py
@@ -0,0 +1,226 @@
+ # -*- coding: utf-8 -*-
+ """
+ pygments.lexers.mime
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Multipurpose Internet Mail Extensions (MIME) data.
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+ """
+
+ import re
+
+ from pygments.lexer import RegexLexer, include
+ from pygments.lexers import get_lexer_for_mimetype
+ from pygments.token import Text, Name, String, Operator, Comment, Other
+ from pygments.util import get_int_opt, ClassNotFound
+
+ __all__ = ["MIMELexer"]
+
+
+ class MIMELexer(RegexLexer):
+ """
+ Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
+ designed to process the nested mulitpart data.
+
+ It assumes that the given data contains both header and body (and is
+ splitted by empty line). If no valid header is found, then the entire data
+ would be treated as body.
+
+ Additional options accepted:
+
+ `MIME-max-level`
+ Max recurssion level for nested MIME structure. Any negative number
+ would treated as unlimited. (default: -1)
+
+ `Content-Type`
+ Treat the data as specific content type. Useful when header is
+ missing, or this lexer would try to parse from header. (default:
+ `text/plain`)
+
+ `Multipart-Boundary`
+ Set the default multipart boundary delimiter. This option is only used
+ when `Content-Type` is `multipart` and header is missing. This lexer
+ would try to parse from header by default. (default: None)
+
+ `Content-Transfer-Encoding`
+ Treat the data as specific encoding. Or this lexer would try to parse
+ from header by default. (default: None)
+
+ .. versionadded:: 2.5
+ """
+
+ name = "MIME"
+ aliases = ["mime"]
+ mimetypes = ["multipart/mixed",
+ "multipart/related",
+ "multipart/alternative"]
+
+ def __init__(self, **options):
+ super().__init__(**options)
+ self.boundary = options.get("Multipart-Boundary")
+ self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
+ self.content_type = options.get("Content_Type", "text/plain")
+ self.max_nested_level = get_int_opt(options, "MIME-max-level", -1)
+
+ def analyse_text(text):
+ try:
+ header, body = text.strip().split("\n\n", 1)
+ if not body.strip():
+ return 0.1
+
+ invalid_headers = MIMELexer.tokens["header"].sub("", header)
+ if invalid_headers.strip():
+ return 0.1
+ else:
+ return 1
+
+ except ValueError:
+ return 0.1
+
+ def get_header_tokens(self, match):
+ field = match.group(1)
+
+ if field.lower() in self.attention_headers:
+ yield match.start(1), Name.Tag, field + ":"
+ yield match.start(2), Text.Whitespace, match.group(2)
+
+ pos = match.end(2)
+ body = match.group(3)
+ for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())):
+ yield pos + i, t, v
+
+ else:
+ yield match.start(), Comment, match.group()
+
+ def get_body_tokens(self, match):
+ pos_body_start = match.start()
+ entire_body = match.group()
+
+ # skip first newline
+ if entire_body[0] == '\n':
+ yield pos_body_start, Text.Whitespace, '\n'
+ pos_body_start = pos_body_start + 1
+ entire_body = entire_body[1:]
+
+ # if it is not a mulitpart
+ if not self.content_type.startswith("multipart") or not self.boundary:
+ for i, t, v in self.get_bodypart_tokens(entire_body):
+ yield pos_body_start + i, t, v
+ return
+
+ # find boundary
+ bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
+ bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
+
+ # some data has prefix text before first boundary
+ m = bdry_matcher.search(entire_body)
+ if m:
+ pos_part_start = pos_body_start + m.end()
+ pos_iter_start = lpos_end = m.end()
+ yield pos_body_start, Text, entire_body[:m.start()]
+ yield pos_body_start + lpos_end, String.Delimiter, m.group()
+ else:
+ pos_part_start = pos_body_start
+ pos_iter_start = 0
+
+ # process tokens of each body part
+ for m in bdry_matcher.finditer(entire_body, pos_iter_start):
+ # bodypart
+ lpos_start = pos_part_start - pos_body_start
+ lpos_end = m.start()
+ part = entire_body[lpos_start:lpos_end]
+ for i, t, v in self.get_bodypart_tokens(part):
+ yield pos_part_start + i, t, v
+
+ # boundary
+ yield pos_body_start + lpos_end, String.Delimiter, m.group()
+ pos_part_start = pos_body_start + m.end()
+
+ # some data has suffix text after last boundary
+ lpos_start = pos_part_start - pos_body_start
+ if lpos_start != len(entire_body):
+ yield pos_part_start, Text, entire_body[lpos_start:]
+
+ def get_bodypart_tokens(self, text):
+ # return if:
+ # * no content
+ # * no content type specific
+ # * content encoding is not readable
+ # * max recurrsion exceed
+ if not text.strip() or not self.content_type:
+ return [(0, Other, text)]
+
+ cte = self.content_transfer_encoding
+ if cte and cte not in {"8bit", "7bit", "quoted-printable"}:
+ return [(0, Other, text)]
+
+ if self.max_nested_level == 0:
+ return [(0, Other, text)]
+
+ # get lexer
+ try:
+ lexer = get_lexer_for_mimetype(self.content_type)
+ except ClassNotFound:
+ return [(0, Other, text)]
+
+ if isinstance(lexer, type(self)):
+ lexer.max_nested_level = self.max_nested_level - 1
+
+ return lexer.get_tokens_unprocessed(text)
+
+ def store_content_type(self, match):
+ self.content_type = match.group(1)
+
+ prefix_len = match.start(1) - match.start(0)
+ yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len]
+ yield match.start(1), Name.Label, match.group(2)
+ yield match.end(2), String.Delimiter, '/'
+ yield match.start(3), Name.Label, match.group(3)
+
+ def get_content_type_subtokens(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Text.Whitespace, match.group(2)
+ yield match.start(3), Name.Attribute, match.group(3)
+ yield match.start(4), Operator, match.group(4)
+ yield match.start(5), String, match.group(5)
+
+ if match.group(3).lower() == "boundary":
+ boundary = match.group(5).strip()
+ if boundary[0] == '"' and boundary[-1] == '"':
+ boundary = boundary[1:-1]
+ self.boundary = boundary
+
+ def store_content_transfer_encoding(self, match):
+ self.content_transfer_encoding = match.group(0).lower()
+ yield match.start(0), Name.Constant, match.group(0)
+
+ attention_headers = {"content-type", "content-transfer-encoding"}
+
+ tokens = {
+ "root": [
+ (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens),
+ (r"^$[\s\S]+", get_body_tokens),
+ ],
+ "header": [
+ # folding
+ (r"\n[ \t]", Text.Whitespace),
+ (r"\n(?![ \t])", Text.Whitespace, "#pop"),
+ ],
+ "content-type": [
+ include("header"),
+ (
+ r"^\s*((multipart|application|audio|font|image|model|text|video"
+ r"|message)/([\w-]+))",
+ store_content_type,
+ ),
+ (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))',
+ get_content_type_subtokens),
+ (r';[ \t]*\n(?![ \t])', Text, '#pop'),
+ ],
+ "content-transfer-encoding": [
+ include("header"),
+ (r"([\w-]+)", store_content_transfer_encoding),
+ ],
+ }
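
The MIMELexer added above documents its options but not how they are passed; a minimal usage sketch (not part of the diff) follows. The sample message and option values are illustrative assumptions; the lexer is driven through the standard Pygments API, and the option keys match the hyphenated names read in __init__ above.

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.mime import MIMELexer

# Illustrative, made-up multipart message; real input would come from the caller.
message = (
    'Content-Type: multipart/mixed; boundary="frontier"\n'
    '\n'
    '--frontier\n'
    'Content-Type: text/plain\n'
    '\n'
    'Hello from the plain-text part.\n'
    '--frontier--\n'
)

# "MIME-max-level" caps recursion into nested parts; "Multipart-Boundary" is
# only a fallback used when the boundary cannot be parsed from the header.
lexer = MIMELexer(**{"MIME-max-level": 2, "Multipart-Boundary": "frontier"})
print(highlight(message, lexer, TerminalFormatter()))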
@@ -0,0 +1,958 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.ml
4
+ ~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for ML family languages.
7
+
8
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+
14
+ from pygments.lexer import RegexLexer, include, bygroups, default, words
15
+ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
16
+ Number, Punctuation, Error
17
+
18
+ __all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer', 'ReasonLexer', 'FStarLexer']
19
+
20
+
21
+ class SMLLexer(RegexLexer):
22
+ """
23
+ For the Standard ML language.
24
+
25
+ .. versionadded:: 1.5
26
+ """
27
+
28
+ name = 'Standard ML'
29
+ aliases = ['sml']
30
+ filenames = ['*.sml', '*.sig', '*.fun']
31
+ mimetypes = ['text/x-standardml', 'application/x-standardml']
32
+
33
+ alphanumid_reserved = {
34
+ # Core
35
+ 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
36
+ 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
37
+ 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
38
+ 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
39
+ # Modules
40
+ 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
41
+ 'struct', 'structure', 'where',
42
+ }
43
+
44
+ symbolicid_reserved = {
45
+ # Core
46
+ ':', r'\|', '=', '=>', '->', '#',
47
+ # Modules
48
+ ':>',
49
+ }
50
+
51
+ nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
52
+
53
+ alphanumid_re = r"[a-zA-Z][\w']*"
54
+ symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
55
+
56
+ # A character constant is a sequence of the form #s, where s is a string
57
+ # constant denoting a string of size one character. This setup just parses
58
+ # the entire string as either a String.Double or a String.Char (depending
59
+ # on the argument), even if the String.Char is an erronous
60
+ # multiple-character string.
61
+ def stringy(whatkind):
62
+ return [
63
+ (r'[^"\\]', whatkind),
64
+ (r'\\[\\"abtnvfr]', String.Escape),
65
+ # Control-character notation is used for codes < 32,
66
+ # where \^@ == \000
67
+ (r'\\\^[\x40-\x5e]', String.Escape),
68
+ # Docs say 'decimal digits'
69
+ (r'\\[0-9]{3}', String.Escape),
70
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
71
+ (r'\\\s+\\', String.Interpol),
72
+ (r'"', whatkind, '#pop'),
73
+ ]
74
+
75
+ # Callbacks for distinguishing tokens and reserved words
76
+ def long_id_callback(self, match):
77
+ if match.group(1) in self.alphanumid_reserved:
78
+ token = Error
79
+ else:
80
+ token = Name.Namespace
81
+ yield match.start(1), token, match.group(1)
82
+ yield match.start(2), Punctuation, match.group(2)
83
+
84
+ def end_id_callback(self, match):
85
+ if match.group(1) in self.alphanumid_reserved:
86
+ token = Error
87
+ elif match.group(1) in self.symbolicid_reserved:
88
+ token = Error
89
+ else:
90
+ token = Name
91
+ yield match.start(1), token, match.group(1)
92
+
93
+ def id_callback(self, match):
94
+ str = match.group(1)
95
+ if str in self.alphanumid_reserved:
96
+ token = Keyword.Reserved
97
+ elif str in self.symbolicid_reserved:
98
+ token = Punctuation
99
+ else:
100
+ token = Name
101
+ yield match.start(1), token, str
102
+
103
+ tokens = {
104
+ # Whitespace and comments are (almost) everywhere
105
+ 'whitespace': [
106
+ (r'\s+', Text),
107
+ (r'\(\*', Comment.Multiline, 'comment'),
108
+ ],
109
+
110
+ 'delimiters': [
111
+ # This lexer treats these delimiters specially:
112
+ # Delimiters define scopes, and the scope is how the meaning of
113
+ # the `|' is resolved - is it a case/handle expression, or function
114
+ # definition by cases? (This is not how the Definition works, but
115
+ # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
116
+ (r'\(|\[|\{', Punctuation, 'main'),
117
+ (r'\)|\]|\}', Punctuation, '#pop'),
118
+ (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
119
+ (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
120
+ (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
121
+ ],
122
+
123
+ 'core': [
124
+ # Punctuation that doesn't overlap symbolic identifiers
125
+ (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
126
+ Punctuation),
127
+
128
+ # Special constants: strings, floats, numbers in decimal and hex
129
+ (r'#"', String.Char, 'char'),
130
+ (r'"', String.Double, 'string'),
131
+ (r'~?0x[0-9a-fA-F]+', Number.Hex),
132
+ (r'0wx[0-9a-fA-F]+', Number.Hex),
133
+ (r'0w\d+', Number.Integer),
134
+ (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
135
+ (r'~?\d+\.\d+', Number.Float),
136
+ (r'~?\d+[eE]~?\d+', Number.Float),
137
+ (r'~?\d+', Number.Integer),
138
+
139
+ # Labels
140
+ (r'#\s*[1-9][0-9]*', Name.Label),
141
+ (r'#\s*(%s)' % alphanumid_re, Name.Label),
142
+ (r'#\s+(%s)' % symbolicid_re, Name.Label),
143
+ # Some reserved words trigger a special, local lexer state change
144
+ (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
145
+ (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
146
+ (r'\b(functor|include|open|signature|structure)\b(?!\')',
147
+ Keyword.Reserved, 'sname'),
148
+ (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
149
+
150
+ # Regular identifiers, long and otherwise
151
+ (r'\'[\w\']*', Name.Decorator),
152
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
153
+ (r'(%s)' % alphanumid_re, id_callback),
154
+ (r'(%s)' % symbolicid_re, id_callback),
155
+ ],
156
+ 'dotted': [
157
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback),
158
+ (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
159
+ (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
160
+ (r'\s+', Error),
161
+ (r'\S+', Error),
162
+ ],
163
+
164
+
165
+ # Main parser (prevents errors in files that have scoping errors)
166
+ 'root': [
167
+ default('main')
168
+ ],
169
+
170
+ # In this scope, I expect '|' to not be followed by a function name,
171
+ # and I expect 'and' to be followed by a binding site
172
+ 'main': [
173
+ include('whitespace'),
174
+
175
+ # Special behavior of val/and/fun
176
+ (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
177
+ (r'\b(fun)\b(?!\')', Keyword.Reserved,
178
+ ('#pop', 'main-fun', 'fname')),
179
+
180
+ include('delimiters'),
181
+ include('core'),
182
+ (r'\S+', Error),
183
+ ],
184
+
185
+ # In this scope, I expect '|' and 'and' to be followed by a function
186
+ 'main-fun': [
187
+ include('whitespace'),
188
+
189
+ (r'\s', Text),
190
+ (r'\(\*', Comment.Multiline, 'comment'),
191
+
192
+ # Special behavior of val/and/fun
193
+ (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
194
+ (r'\b(val)\b(?!\')', Keyword.Reserved,
195
+ ('#pop', 'main', 'vname')),
196
+
197
+ # Special behavior of '|' and '|'-manipulating keywords
198
+ (r'\|', Punctuation, 'fname'),
199
+ (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
200
+ ('#pop', 'main')),
201
+
202
+ include('delimiters'),
203
+ include('core'),
204
+ (r'\S+', Error),
205
+ ],
206
+
207
+ # Character and string parsers
208
+ 'char': stringy(String.Char),
209
+ 'string': stringy(String.Double),
210
+
211
+ 'breakout': [
212
+ (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
213
+ ],
214
+
215
+ # Dealing with what comes after module system keywords
216
+ 'sname': [
217
+ include('whitespace'),
218
+ include('breakout'),
219
+
220
+ (r'(%s)' % alphanumid_re, Name.Namespace),
221
+ default('#pop'),
222
+ ],
223
+
224
+ # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
225
+ 'fname': [
226
+ include('whitespace'),
227
+ (r'\'[\w\']*', Name.Decorator),
228
+ (r'\(', Punctuation, 'tyvarseq'),
229
+
230
+ (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
231
+ (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
232
+
233
+ # Ignore interesting function declarations like "fun (x + y) = ..."
234
+ default('#pop'),
235
+ ],
236
+
237
+ # Dealing with what comes after the 'val' (or 'and') keyword
238
+ 'vname': [
239
+ include('whitespace'),
240
+ (r'\'[\w\']*', Name.Decorator),
241
+ (r'\(', Punctuation, 'tyvarseq'),
242
+
243
+ (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
244
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
245
+ (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
246
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
247
+ (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
248
+ (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
249
+
250
+ # Ignore interesting patterns like 'val (x, y)'
251
+ default('#pop'),
252
+ ],
253
+
254
+ # Dealing with what comes after the 'type' (or 'and') keyword
255
+ 'tname': [
256
+ include('whitespace'),
257
+ include('breakout'),
258
+
259
+ (r'\'[\w\']*', Name.Decorator),
260
+ (r'\(', Punctuation, 'tyvarseq'),
261
+ (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
262
+
263
+ (r'(%s)' % alphanumid_re, Keyword.Type),
264
+ (r'(%s)' % symbolicid_re, Keyword.Type),
265
+ (r'\S+', Error, '#pop'),
266
+ ],
267
+
268
+ # A type binding includes most identifiers
269
+ 'typbind': [
270
+ include('whitespace'),
271
+
272
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
273
+
274
+ include('breakout'),
275
+ include('core'),
276
+ (r'\S+', Error, '#pop'),
277
+ ],
278
+
279
+ # Dealing with what comes after the 'datatype' (or 'and') keyword
280
+ 'dname': [
281
+ include('whitespace'),
282
+ include('breakout'),
283
+
284
+ (r'\'[\w\']*', Name.Decorator),
285
+ (r'\(', Punctuation, 'tyvarseq'),
286
+ (r'(=)(\s*)(datatype)',
287
+ bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
288
+ (r'=(?!%s)' % symbolicid_re, Punctuation,
289
+ ('#pop', 'datbind', 'datcon')),
290
+
291
+ (r'(%s)' % alphanumid_re, Keyword.Type),
292
+ (r'(%s)' % symbolicid_re, Keyword.Type),
293
+ (r'\S+', Error, '#pop'),
294
+ ],
295
+
296
+ # common case - A | B | C of int
297
+ 'datbind': [
298
+ include('whitespace'),
299
+
300
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
301
+ (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
302
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
303
+
304
+ (r'(\|)(\s*)(%s)' % alphanumid_re,
305
+ bygroups(Punctuation, Text, Name.Class)),
306
+ (r'(\|)(\s+)(%s)' % symbolicid_re,
307
+ bygroups(Punctuation, Text, Name.Class)),
308
+
309
+ include('breakout'),
310
+ include('core'),
311
+ (r'\S+', Error),
312
+ ],
313
+
314
+ # Dealing with what comes after an exception
315
+ 'ename': [
316
+ include('whitespace'),
317
+
318
+ (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
319
+ bygroups(Keyword.Reserved, Text, Name.Class)),
320
+ (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
321
+ bygroups(Keyword.Reserved, Text, Name.Class)),
322
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
323
+
324
+ include('breakout'),
325
+ include('core'),
326
+ (r'\S+', Error),
327
+ ],
328
+
329
+ 'datcon': [
330
+ include('whitespace'),
331
+ (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
332
+ (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
333
+ (r'\S+', Error, '#pop'),
334
+ ],
335
+
336
+ # Series of type variables
337
+ 'tyvarseq': [
338
+ (r'\s', Text),
339
+ (r'\(\*', Comment.Multiline, 'comment'),
340
+
341
+ (r'\'[\w\']*', Name.Decorator),
342
+ (alphanumid_re, Name),
343
+ (r',', Punctuation),
344
+ (r'\)', Punctuation, '#pop'),
345
+ (symbolicid_re, Name),
346
+ ],
347
+
348
+ 'comment': [
349
+ (r'[^(*)]', Comment.Multiline),
350
+ (r'\(\*', Comment.Multiline, '#push'),
351
+ (r'\*\)', Comment.Multiline, '#pop'),
352
+ (r'[(*)]', Comment.Multiline),
353
+ ],
354
+ }
355
+
356
+
357
+ class OcamlLexer(RegexLexer):
358
+ """
359
+ For the OCaml language.
360
+
361
+ .. versionadded:: 0.7
362
+ """
363
+
364
+ name = 'OCaml'
365
+ aliases = ['ocaml']
366
+ filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
367
+ mimetypes = ['text/x-ocaml']
368
+
369
+ keywords = (
370
+ 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
371
+ 'downto', 'else', 'end', 'exception', 'external', 'false',
372
+ 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
373
+ 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
374
+ 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
375
+ 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
376
+ 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
377
+ )
378
+ keyopts = (
379
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
380
+ r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
381
+ '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
382
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
383
+ )
384
+
385
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
386
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
387
+ prefix_syms = r'[!?~]'
388
+ infix_syms = r'[=<>@^|&+\*/$%-]'
389
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
390
+
391
+ tokens = {
392
+ 'escape-sequence': [
393
+ (r'\\[\\"\'ntbr]', String.Escape),
394
+ (r'\\[0-9]{3}', String.Escape),
395
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
396
+ ],
397
+ 'root': [
398
+ (r'\s+', Text),
399
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
400
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
401
+ (r'\b([A-Z][\w\']*)', Name.Class),
402
+ (r'\(\*(?![)])', Comment, 'comment'),
403
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
404
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
405
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
406
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
407
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
408
+
409
+ (r"[^\W\d][\w']*", Name),
410
+
411
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
412
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
413
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
414
+ (r'0[bB][01][01_]*', Number.Bin),
415
+ (r'\d[\d_]*', Number.Integer),
416
+
417
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
418
+ String.Char),
419
+ (r"'.'", String.Char),
420
+ (r"'", Keyword), # a stray quote is another syntax element
421
+
422
+ (r'"', String.Double, 'string'),
423
+
424
+ (r'[~?][a-z][\w\']*:', Name.Variable),
425
+ ],
426
+ 'comment': [
427
+ (r'[^(*)]+', Comment),
428
+ (r'\(\*', Comment, '#push'),
429
+ (r'\*\)', Comment, '#pop'),
430
+ (r'[(*)]', Comment),
431
+ ],
432
+ 'string': [
433
+ (r'[^\\"]+', String.Double),
434
+ include('escape-sequence'),
435
+ (r'\\\n', String.Double),
436
+ (r'"', String.Double, '#pop'),
437
+ ],
438
+ 'dotted': [
439
+ (r'\s+', Text),
440
+ (r'\.', Punctuation),
441
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
442
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
443
+ (r'[a-z_][\w\']*', Name, '#pop'),
444
+ default('#pop'),
445
+ ],
446
+ }
447
+
448
+ class OpaLexer(RegexLexer):
449
+ """
450
+ Lexer for the Opa language (http://opalang.org).
451
+
452
+ .. versionadded:: 1.5
453
+ """
454
+
455
+ name = 'Opa'
456
+ aliases = ['opa']
457
+ filenames = ['*.opa']
458
+ mimetypes = ['text/x-opa']
459
+
460
+ # most of these aren't strictly keywords
461
+ # but if you color only real keywords, you might just
462
+ # as well not color anything
463
+ keywords = (
464
+ 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
465
+ 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
466
+ 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
467
+ 'type', 'val', 'with', 'xml_parser',
468
+ )
469
+
470
+ # matches both stuff and `stuff`
471
+ ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
472
+
473
+ op_re = r'[.=\-<>,@~%/+?*&^!]'
474
+ punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
475
+ # because they are also used for inserts
476
+
477
+ tokens = {
478
+ # copied from the caml lexer, should be adapted
479
+ 'escape-sequence': [
480
+ (r'\\[\\"\'ntr}]', String.Escape),
481
+ (r'\\[0-9]{3}', String.Escape),
482
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
483
+ ],
484
+
485
+ # factorizing these rules, because they are inserted many times
486
+ 'comments': [
487
+ (r'/\*', Comment, 'nested-comment'),
488
+ (r'//.*?$', Comment),
489
+ ],
490
+ 'comments-and-spaces': [
491
+ include('comments'),
492
+ (r'\s+', Text),
493
+ ],
494
+
495
+ 'root': [
496
+ include('comments-and-spaces'),
497
+ # keywords
498
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
499
+ # directives
500
+ # we could parse the actual set of directives instead of anything
501
+ # starting with @, but this is troublesome
502
+ # because it needs to be adjusted all the time
503
+ # and assuming we parse only sources that compile, it is useless
504
+ (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
505
+
506
+ # number literals
507
+ (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
508
+ (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
509
+ (r'-?\d+[eE][+\-]?\d+', Number.Float),
510
+ (r'0[xX][\da-fA-F]+', Number.Hex),
511
+ (r'0[oO][0-7]+', Number.Oct),
512
+ (r'0[bB][01]+', Number.Bin),
513
+ (r'\d+', Number.Integer),
514
+ # color literals
515
+ (r'#[\da-fA-F]{3,6}', Number.Integer),
516
+
517
+ # string literals
518
+ (r'"', String.Double, 'string'),
519
+ # char literal, should be checked because this is the regexp from
520
+ # the caml lexer
521
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
522
+ String.Char),
523
+
524
+ # this is meant to deal with embedded exprs in strings
525
+ # every time we find a '}' we pop a state so that if we were
526
+ # inside a string, we are back in the string state
527
+ # as a consequence, we must also push a state every time we find a
528
+ # '{' or else we will have errors when parsing {} for instance
529
+ (r'\{', Operator, '#push'),
530
+ (r'\}', Operator, '#pop'),
531
+
532
+ # html literals
533
+ # this is a much more strict that the actual parser,
534
+ # since a<b would not be parsed as html
535
+ # but then again, the parser is way too lax, and we can't hope
536
+ # to have something as tolerant
537
+ (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
538
+
539
+ # db path
540
+ # matching the '[_]' in '/a[_]' because it is a part
541
+ # of the syntax of the db path definition
542
+ # unfortunately, i don't know how to match the ']' in
543
+ # /a[1], so this is somewhat inconsistent
544
+ (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
545
+ # putting the same color on <- as on db path, since
546
+ # it can be used only to mean Db.write
547
+ (r'<-(?!'+op_re+r')', Name.Variable),
548
+
549
+ # 'modules'
550
+ # although modules are not distinguished by their names as in caml
551
+ # the standard library seems to follow the convention that modules
552
+ # only area capitalized
553
+ (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
554
+
555
+ # operators
556
+ # = has a special role because this is the only
557
+ # way to syntactic distinguish binding constructions
558
+ # unfortunately, this colors the equal in {x=2} too
559
+ (r'=(?!'+op_re+r')', Keyword),
560
+ (r'(%s)+' % op_re, Operator),
561
+ (r'(%s)+' % punc_re, Operator),
562
+
563
+ # coercions
564
+ (r':', Operator, 'type'),
565
+ # type variables
566
+ # we need this rule because we don't parse specially type
567
+ # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
568
+ ("'"+ident_re, Keyword.Type),
569
+
570
+ # id literal, #something, or #{expr}
571
+ (r'#'+ident_re, String.Single),
572
+ (r'#(?=\{)', String.Single),
573
+
574
+ # identifiers
575
+ # this avoids to color '2' in 'a2' as an integer
576
+ (ident_re, Text),
577
+
578
+ # default, not sure if that is needed or not
579
+ # (r'.', Text),
580
+ ],
581
+
582
+ # it is quite painful to have to parse types to know where they end
583
+ # this is the general rule for a type
584
+ # a type is either:
585
+ # * -> ty
586
+ # * type-with-slash
587
+ # * type-with-slash -> ty
588
+ # * type-with-slash (, type-with-slash)+ -> ty
589
+ #
590
+ # the code is pretty funky in here, but this code would roughly
591
+ # translate in caml to:
592
+ # let rec type stream =
593
+ # match stream with
594
+ # | [< "->"; stream >] -> type stream
595
+ # | [< ""; stream >] ->
596
+ # type_with_slash stream
597
+ # type_lhs_1 stream;
598
+ # and type_1 stream = ...
599
+ 'type': [
600
+ include('comments-and-spaces'),
601
+ (r'->', Keyword.Type),
602
+ default(('#pop', 'type-lhs-1', 'type-with-slash')),
603
+ ],
604
+
605
+ # parses all the atomic or closed constructions in the syntax of type
606
+ # expressions: record types, tuple types, type constructors, basic type
607
+ # and type variables
608
+ 'type-1': [
609
+ include('comments-and-spaces'),
610
+ (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
611
+ (r'~?\{', Keyword.Type, ('#pop', 'type-record')),
612
+ (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
613
+ (ident_re, Keyword.Type, '#pop'),
614
+ ("'"+ident_re, Keyword.Type),
615
+ # this case is not in the syntax but sometimes
616
+ # we think we are parsing types when in fact we are parsing
617
+ # some css, so we just pop the states until we get back into
618
+ # the root state
619
+ default('#pop'),
620
+ ],
621
+
622
+ # type-with-slash is either:
623
+ # * type-1
624
+ # * type-1 (/ type-1)+
625
+ 'type-with-slash': [
626
+ include('comments-and-spaces'),
627
+ default(('#pop', 'slash-type-1', 'type-1')),
628
+ ],
629
+ 'slash-type-1': [
630
+ include('comments-and-spaces'),
631
+ ('/', Keyword.Type, ('#pop', 'type-1')),
632
+ # same remark as above
633
+ default('#pop'),
634
+ ],
635
+
636
+ # we go in this state after having parsed a type-with-slash
637
+         # while trying to parse a type
+         # and at this point we must determine if we are parsing an arrow
+         # type (in which case we must continue parsing) or not (in which
+         # case we stop)
+         'type-lhs-1': [
+             include('comments-and-spaces'),
+             (r'->', Keyword.Type, ('#pop', 'type')),
+             (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
+             default('#pop'),
+         ],
+         'type-arrow': [
+             include('comments-and-spaces'),
+             # the look-ahead here allows us to parse f(x : int, y : float -> truc)
+             # correctly
+             (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
+             (r'->', Keyword.Type, ('#pop', 'type')),
+             # same remark as above
+             default('#pop'),
+         ],
+
+         # no need to do precise parsing for tuples and records
+         # because they are closed constructions, so we can simply
+         # find the closing delimiter
+         # note that this would not work if the source
+         # contained identifiers like `{)` (although it could be patched
+         # to support them)
+         'type-tuple': [
+             include('comments-and-spaces'),
+             (r'[^()/*]+', Keyword.Type),
+             (r'[/*]', Keyword.Type),
+             (r'\(', Keyword.Type, '#push'),
+             (r'\)', Keyword.Type, '#pop'),
+         ],
+         'type-record': [
+             include('comments-and-spaces'),
+             (r'[^{}/*]+', Keyword.Type),
+             (r'[/*]', Keyword.Type),
+             (r'\{', Keyword.Type, '#push'),
+             (r'\}', Keyword.Type, '#pop'),
+         ],
+
+         # 'type-tuple': [
+         #     include('comments-and-spaces'),
+         #     (r'\)', Keyword.Type, '#pop'),
+         #     default(('#pop', 'type-tuple-1', 'type-1')),
+         # ],
+         # 'type-tuple-1': [
+         #     include('comments-and-spaces'),
+         #     (r',?\s*\)', Keyword.Type, '#pop'),  # ,) is a valid end of tuple, in (1,)
+         #     (r',', Keyword.Type, 'type-1'),
+         # ],
+         # 'type-record':[
+         #     include('comments-and-spaces'),
+         #     (r'\}', Keyword.Type, '#pop'),
+         #     (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
+         # ],
+         # 'type-record-field-expr': [
+         #
+         # ],
+
+         'nested-comment': [
+             (r'[^/*]+', Comment),
+             (r'/\*', Comment, '#push'),
+             (r'\*/', Comment, '#pop'),
+             (r'[/*]', Comment),
+         ],
+
+         # the copy-pasting between string and single-string
+         # is unfortunate; a possible refactoring is sketched
+         # right after this tokens table
+         'string': [
+             (r'[^\\"{]+', String.Double),
+             (r'"', String.Double, '#pop'),
+             (r'\{', Operator, 'root'),
+             include('escape-sequence'),
+         ],
+         'single-string': [
+             (r'[^\\\'{]+', String.Double),
+             (r'\'', String.Double, '#pop'),
+             (r'\{', Operator, 'root'),
+             include('escape-sequence'),
+         ],
+
+         # all the html stuff
+         # we can't really reuse an existing html parser
+         # because we must be able to parse embedded expressions
+
+         # we are in this state after someone parsed the '<' that
+         # started the html literal
+         'html-open-tag': [
+             (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
+             (r'>', String.Single, ('#pop', 'html-content')),
+         ],
+
+         # we are in this state after someone parsed the '</' that
+         # started the end of the closing tag
+         'html-end-tag': [
+             # this is a star, because </> is allowed
+             (r'[\w\-:]*>', String.Single, '#pop'),
+         ],
+
+         # we are in this state after having parsed '<ident(:ident)?'
+         # we thus parse a possibly empty list of attributes
+         'html-attr': [
+             (r'\s+', Text),
+             (r'[\w\-:]+=', String.Single, 'html-attr-value'),
+             (r'/>', String.Single, '#pop'),
+             (r'>', String.Single, ('#pop', 'html-content')),
+         ],
+
+         'html-attr-value': [
+             (r"'", String.Single, ('#pop', 'single-string')),
+             (r'"', String.Single, ('#pop', 'string')),
+             (r'#'+ident_re, String.Single, '#pop'),
+             (r'#(?=\{)', String.Single, ('#pop', 'root')),
+             (r'[^"\'{`=<>]+', String.Single, '#pop'),
+             (r'\{', Operator, ('#pop', 'root')),  # this is a tail call!
+         ],
+
+         # we should probably deal with '\' escapes here
+         'html-content': [
+             (r'<!--', Comment, 'html-comment'),
+             (r'</', String.Single, ('#pop', 'html-end-tag')),
+             (r'<', String.Single, 'html-open-tag'),
+             (r'\{', Operator, 'root'),
+             (r'[^<{]+', String.Single),
+         ],
+
+         'html-comment': [
+             (r'-->', Comment, '#pop'),
+             (r'[^\-]+|-', Comment),
+         ],
+     }
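The comment above the 'string' and 'single-string' states asks whether the duplication can be avoided. Below is a minimal sketch of one way to do that, assuming only Pygments' public lexer helpers and the standard library; the helper name make_string_state is hypothetical and is not part of this diff or of Pygments itself.

    import re

    from pygments.lexer import include
    from pygments.token import Operator, String

    def make_string_state(quote):
        """Build an Opa string state for the given quote character (hypothetical helper)."""
        return [
            (r'[^\\%s{]+' % re.escape(quote), String.Double),  # ordinary characters
            (re.escape(quote), String.Double, '#pop'),          # closing quote
            (r'\{', Operator, 'root'),                          # embedded Opa expression
            include('escape-sequence'),
        ]

    # The two duplicated states could then be written as:
    #     'string': make_string_state('"'),
    #     'single-string': make_string_state("'"),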
+
+
+ class ReasonLexer(RegexLexer):
+     """
+     For the ReasonML language (https://reasonml.github.io/).
+
+     .. versionadded:: 2.6
+     """
+
+     name = 'ReasonML'
+     aliases = ['reason', 'reasonml']
+     filenames = ['*.re', '*.rei']
+     mimetypes = ['text/x-reasonml']
+
+     keywords = (
+         'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
+         'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
+         'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
+         'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
+         'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+         'type', 'val', 'virtual', 'when', 'while', 'with',
+     )
+     keyopts = (
+         '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
+         r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+         '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+         r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
+     )
+
+     operators = r'[!$%&*+\./:<=>?@^|~-]'
+     word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or')
+     prefix_syms = r'[!?~]'
+     infix_syms = r'[=<>@^|&+\*/$%-]'
+     primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+     tokens = {
+         'escape-sequence': [
+             (r'\\[\\"\'ntbr]', String.Escape),
+             (r'\\[0-9]{3}', String.Escape),
+             (r'\\x[0-9a-fA-F]{2}', String.Escape),
+         ],
+         'root': [
+             (r'\s+', Text),
+             (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+             (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+             (r'\b([A-Z][\w\']*)', Name.Class),
+             (r'//.*?\n', Comment.Single),
+             (r'\/\*(?!/)', Comment.Multiline, 'comment'),
+             (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+             (r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
+             (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+             (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+             (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+             (r"[^\W\d][\w']*", Name),
+
+             (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+             (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+             (r'0[oO][0-7][0-7_]*', Number.Oct),
+             (r'0[bB][01][01_]*', Number.Bin),
+             (r'\d[\d_]*', Number.Integer),
+
+             (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+              String.Char),
+             (r"'.'", String.Char),
+             (r"'", Keyword),
+
+             (r'"', String.Double, 'string'),
+
+             (r'[~?][a-z][\w\']*:', Name.Variable),
+         ],
+         'comment': [
+             (r'[^/*]+', Comment.Multiline),
+             (r'\/\*', Comment.Multiline, '#push'),
+             (r'\*\/', Comment.Multiline, '#pop'),
+             (r'\*', Comment.Multiline),
+         ],
+         'string': [
+             (r'[^\\"]+', String.Double),
+             include('escape-sequence'),
+             (r'\\\n', String.Double),
+             (r'"', String.Double, '#pop'),
+         ],
+         'dotted': [
+             (r'\s+', Text),
+             (r'\.', Punctuation),
+             (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+             (r'[A-Z][\w\']*', Name.Class, '#pop'),
+             (r'[a-z_][\w\']*', Name, '#pop'),
+             default('#pop'),
+         ],
+     }
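For orientation, the ReasonLexer registers the aliases 'reason' and 'reasonml'. The following is a rough usage sketch, assuming a full Pygments installation in which those aliases are registered; the ReasonML snippet itself is made up for illustration.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # Tiny ReasonML sample, resolved to ReasonLexer via the 'reasonml' alias.
    code = """
    let rec fib = (n) =>
      switch (n) {
      | 0 | 1 => n
      | _ => fib(n - 1) + fib(n - 2)
      };
    """

    lexer = get_lexer_by_name('reasonml')
    print(highlight(code, lexer, TerminalFormatter()))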
+
+
+ class FStarLexer(RegexLexer):
+     """
+     For the F* language (https://www.fstar-lang.org/).
+     .. versionadded:: 2.7
+     """
+
+     name = 'FStar'
+     aliases = ['fstar']
+     filenames = ['*.fst', '*.fsti']
+     mimetypes = ['text/x-fstar']
+
+     keywords = (
+         'abstract', 'attributes', 'noeq', 'unopteq', 'and',
+         'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures',
+         'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if',
+         'in', 'include', 'inline', 'inline_for_extraction', 'irreducible',
+         'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract',
+         'of', 'open', 'opaque', 'private', 'range_of', 'reifiable',
+         'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect',
+         'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable',
+         'val', 'when', 'with', 'not'
+     )
+     decl_keywords = ('let', 'rec')
+     assume_keywords = ('assume', 'admit', 'assert', 'calc')
+     keyopts = (
+         r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
+         r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
+         r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
+         r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
+         r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
+     )
+
+     operators = r'[!$%&*+\./:<=>?@^|~-]'
+     prefix_syms = r'[!?~]'
+     infix_syms = r'[=<>@^|&+\*/$%-]'
+     primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+     tokens = {
+         'escape-sequence': [
+             (r'\\[\\"\'ntbr]', String.Escape),
+             (r'\\[0-9]{3}', String.Escape),
+             (r'\\x[0-9a-fA-F]{2}', String.Escape),
+         ],
+         'root': [
+             (r'\s+', Text),
+             (r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo),
+             (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+             (r'\b([A-Z][\w\']*)', Name.Class),
+             (r'\(\*(?![)])', Comment, 'comment'),
+             (r'^\/\/.+$', Comment),
+             (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+             (r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception),
+             (r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration),
+             (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+             (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+             (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+             (r"[^\W\d][\w']*", Name),
+
+             (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+             (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+             (r'0[oO][0-7][0-7_]*', Number.Oct),
+             (r'0[bB][01][01_]*', Number.Bin),
+             (r'\d[\d_]*', Number.Integer),
+
+             (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+              String.Char),
+             (r"'.'", String.Char),
+             (r"'", Keyword),  # a stray quote is another syntax element
+             (r"\`([\w\'.]+)\`", Operator.Word),  # for infix applications
+             (r"\`", Keyword),  # for quoting
+             (r'"', String.Double, 'string'),
+
+             (r'[~?][a-z][\w\']*:', Name.Variable),
+         ],
+         'comment': [
+             (r'[^(*)]+', Comment),
+             (r'\(\*', Comment, '#push'),
+             (r'\*\)', Comment, '#pop'),
+             (r'[(*)]', Comment),
+         ],
+         'string': [
+             (r'[^\\"]+', String.Double),
+             include('escape-sequence'),
+             (r'\\\n', String.Double),
+             (r'"', String.Double, '#pop'),
+         ],
+         'dotted': [
+             (r'\s+', Text),
+             (r'\.', Punctuation),
+             (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+             (r'[A-Z][\w\']*', Name.Class, '#pop'),
+             (r'[a-z_][\w\']*', Name, '#pop'),
+             default('#pop'),
+         ],
+     }
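The FStarLexer's 'comment' state relies on '#push'/'#pop' so that nested (* ... *) comments are tracked correctly. Below is a small sketch of how one might inspect that behaviour, again assuming a full Pygments installation where the 'fstar' alias resolves to this lexer; the F* snippet is invented for illustration.

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Comment

    # A nested F* comment; the inner (* ... *) should not terminate the outer one.
    fstar_code = "let x = 1 (* outer (* inner *) still inside the outer comment *)\n"

    lexer = get_lexer_by_name('fstar')
    for token_type, value in lexer.get_tokens(fstar_code):
        if token_type in Comment:
            print(repr(value))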