pygments.rb 0.2.4 → 0.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120) hide show
  1. data/lib/pygments/version.rb +1 -1
  2. data/vendor/pygments-main/AUTHORS +14 -0
  3. data/vendor/pygments-main/CHANGES +34 -3
  4. data/vendor/pygments-main/Makefile +1 -1
  5. data/vendor/pygments-main/docs/generate.py +1 -1
  6. data/vendor/pygments-main/external/markdown-processor.py +1 -1
  7. data/vendor/pygments-main/external/moin-parser.py +1 -1
  8. data/vendor/pygments-main/external/rst-directive-old.py +1 -1
  9. data/vendor/pygments-main/external/rst-directive.py +1 -1
  10. data/vendor/pygments-main/pygments/__init__.py +1 -1
  11. data/vendor/pygments-main/pygments/cmdline.py +4 -1
  12. data/vendor/pygments-main/pygments/console.py +1 -1
  13. data/vendor/pygments-main/pygments/filter.py +1 -1
  14. data/vendor/pygments-main/pygments/filters/__init__.py +1 -1
  15. data/vendor/pygments-main/pygments/formatter.py +1 -1
  16. data/vendor/pygments-main/pygments/formatters/__init__.py +1 -1
  17. data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -1
  18. data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -1
  19. data/vendor/pygments-main/pygments/formatters/html.py +2 -2
  20. data/vendor/pygments-main/pygments/formatters/img.py +1 -1
  21. data/vendor/pygments-main/pygments/formatters/latex.py +3 -2
  22. data/vendor/pygments-main/pygments/formatters/other.py +1 -1
  23. data/vendor/pygments-main/pygments/formatters/rtf.py +1 -1
  24. data/vendor/pygments-main/pygments/formatters/svg.py +1 -1
  25. data/vendor/pygments-main/pygments/formatters/terminal.py +5 -2
  26. data/vendor/pygments-main/pygments/formatters/terminal256.py +5 -2
  27. data/vendor/pygments-main/pygments/lexer.py +29 -10
  28. data/vendor/pygments-main/pygments/lexers/__init__.py +14 -11
  29. data/vendor/pygments-main/pygments/lexers/_asybuiltins.py +1 -1
  30. data/vendor/pygments-main/pygments/lexers/_clbuiltins.py +1 -1
  31. data/vendor/pygments-main/pygments/lexers/_luabuiltins.py +1 -1
  32. data/vendor/pygments-main/pygments/lexers/_mapping.py +41 -23
  33. data/vendor/pygments-main/pygments/lexers/_phpbuiltins.py +1 -1
  34. data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +1 -1
  35. data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +29 -0
  36. data/vendor/pygments-main/pygments/lexers/_vimbuiltins.py +3 -3
  37. data/vendor/pygments-main/pygments/lexers/agile.py +148 -443
  38. data/vendor/pygments-main/pygments/lexers/asm.py +5 -3
  39. data/vendor/pygments-main/pygments/lexers/compiled.py +298 -294
  40. data/vendor/pygments-main/pygments/lexers/dotnet.py +40 -34
  41. data/vendor/pygments-main/pygments/lexers/functional.py +723 -4
  42. data/vendor/pygments-main/pygments/lexers/hdl.py +228 -6
  43. data/vendor/pygments-main/pygments/lexers/jvm.py +678 -0
  44. data/vendor/pygments-main/pygments/lexers/math.py +65 -2
  45. data/vendor/pygments-main/pygments/lexers/other.py +875 -481
  46. data/vendor/pygments-main/pygments/lexers/parsers.py +1 -1
  47. data/vendor/pygments-main/pygments/lexers/shell.py +360 -0
  48. data/vendor/pygments-main/pygments/lexers/special.py +1 -1
  49. data/vendor/pygments-main/pygments/lexers/sql.py +565 -0
  50. data/vendor/pygments-main/pygments/lexers/templates.py +1 -1
  51. data/vendor/pygments-main/pygments/lexers/text.py +237 -100
  52. data/vendor/pygments-main/pygments/lexers/web.py +146 -10
  53. data/vendor/pygments-main/pygments/plugin.py +1 -1
  54. data/vendor/pygments-main/pygments/scanner.py +1 -1
  55. data/vendor/pygments-main/pygments/style.py +1 -1
  56. data/vendor/pygments-main/pygments/styles/__init__.py +2 -1
  57. data/vendor/pygments-main/pygments/styles/autumn.py +1 -1
  58. data/vendor/pygments-main/pygments/styles/borland.py +1 -1
  59. data/vendor/pygments-main/pygments/styles/bw.py +1 -1
  60. data/vendor/pygments-main/pygments/styles/colorful.py +1 -1
  61. data/vendor/pygments-main/pygments/styles/default.py +1 -1
  62. data/vendor/pygments-main/pygments/styles/emacs.py +1 -1
  63. data/vendor/pygments-main/pygments/styles/friendly.py +1 -1
  64. data/vendor/pygments-main/pygments/styles/fruity.py +1 -2
  65. data/vendor/pygments-main/pygments/styles/manni.py +1 -1
  66. data/vendor/pygments-main/pygments/styles/monokai.py +1 -1
  67. data/vendor/pygments-main/pygments/styles/murphy.py +1 -1
  68. data/vendor/pygments-main/pygments/styles/native.py +1 -1
  69. data/vendor/pygments-main/pygments/styles/pastie.py +1 -1
  70. data/vendor/pygments-main/pygments/styles/perldoc.py +1 -1
  71. data/vendor/pygments-main/pygments/styles/rrt.py +33 -0
  72. data/vendor/pygments-main/pygments/styles/tango.py +1 -1
  73. data/vendor/pygments-main/pygments/styles/trac.py +1 -1
  74. data/vendor/pygments-main/pygments/styles/vim.py +1 -1
  75. data/vendor/pygments-main/pygments/styles/vs.py +1 -1
  76. data/vendor/pygments-main/pygments/token.py +1 -1
  77. data/vendor/pygments-main/pygments/unistring.py +1 -1
  78. data/vendor/pygments-main/pygments/util.py +2 -2
  79. data/vendor/pygments-main/scripts/check_sources.py +2 -2
  80. data/vendor/pygments-main/scripts/find_codetags.py +1 -1
  81. data/vendor/pygments-main/scripts/find_error.py +5 -2
  82. data/vendor/pygments-main/scripts/get_vimkw.py +9 -4
  83. data/vendor/pygments-main/setup.py +1 -1
  84. data/vendor/pygments-main/tests/examplefiles/classes.dylan +16 -0
  85. data/vendor/pygments-main/tests/examplefiles/coq_RelationClasses +447 -0
  86. data/vendor/pygments-main/tests/examplefiles/example.cls +15 -0
  87. data/vendor/pygments-main/tests/examplefiles/example.moon +629 -0
  88. data/vendor/pygments-main/tests/examplefiles/example.p +34 -0
  89. data/vendor/pygments-main/tests/examplefiles/example.snobol +15 -0
  90. data/vendor/pygments-main/tests/examplefiles/example.u +548 -0
  91. data/vendor/pygments-main/tests/examplefiles/example_elixir.ex +363 -0
  92. data/vendor/pygments-main/tests/examplefiles/foo.sce +6 -0
  93. data/vendor/pygments-main/tests/examplefiles/http_request_example +14 -0
  94. data/vendor/pygments-main/tests/examplefiles/http_response_example +27 -0
  95. data/vendor/pygments-main/tests/examplefiles/irc.lsp +214 -0
  96. data/vendor/pygments-main/tests/examplefiles/markdown.lsp +679 -0
  97. data/vendor/pygments-main/tests/examplefiles/nemerle_sample.n +4 -2
  98. data/vendor/pygments-main/tests/examplefiles/reversi.lsp +427 -0
  99. data/vendor/pygments-main/tests/examplefiles/scilab.sci +30 -0
  100. data/vendor/pygments-main/tests/examplefiles/test.bro +250 -0
  101. data/vendor/pygments-main/tests/examplefiles/test.cs +23 -0
  102. data/vendor/pygments-main/tests/examplefiles/test.dart +23 -0
  103. data/vendor/pygments-main/tests/examplefiles/test.ecl +58 -0
  104. data/vendor/pygments-main/tests/examplefiles/test.fan +818 -0
  105. data/vendor/pygments-main/tests/examplefiles/test.ps1 +108 -0
  106. data/vendor/pygments-main/tests/examplefiles/test.vhdl +161 -0
  107. data/vendor/pygments-main/tests/old_run.py +1 -1
  108. data/vendor/pygments-main/tests/run.py +1 -1
  109. data/vendor/pygments-main/tests/test_basic_api.py +4 -3
  110. data/vendor/pygments-main/tests/test_clexer.py +1 -1
  111. data/vendor/pygments-main/tests/test_cmdline.py +1 -1
  112. data/vendor/pygments-main/tests/test_examplefiles.py +4 -3
  113. data/vendor/pygments-main/tests/test_html_formatter.py +33 -1
  114. data/vendor/pygments-main/tests/test_latex_formatter.py +1 -1
  115. data/vendor/pygments-main/tests/test_perllexer.py +137 -0
  116. data/vendor/pygments-main/tests/test_regexlexer.py +1 -1
  117. data/vendor/pygments-main/tests/test_token.py +1 -1
  118. data/vendor/pygments-main/tests/test_using_api.py +1 -1
  119. data/vendor/pygments-main/tests/test_util.py +35 -5
  120. metadata +30 -4
@@ -5,7 +5,7 @@
5
5
 
6
6
  Lexers for .net languages.
7
7
 
8
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
8
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
9
9
  :license: BSD, see LICENSE for details.
10
10
  """
11
11
  import re
@@ -92,7 +92,7 @@ class CSharpLexer(RegexLexer):
92
92
  (r'\n', Text),
93
93
  (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
94
94
  (r'[{}]', Punctuation),
95
- (r'@"(\\\\|\\"|[^"])*"', String),
95
+ (r'@"(""|[^"])*"', String),
96
96
  (r'"(\\\\|\\"|[^"\n])*["\n]', String),
97
97
  (r"'\\.'|'[^\\]'", String.Char),
98
98
  (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
@@ -111,10 +111,12 @@ class CSharpLexer(RegexLexer):
111
111
  r'ref|return|sealed|sizeof|stackalloc|static|'
112
112
  r'switch|this|throw|true|try|typeof|'
113
113
  r'unchecked|unsafe|virtual|void|while|'
114
- r'get|set|new|partial|yield|add|remove|value)\b', Keyword),
114
+ r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
115
+ r'descending|from|group|into|orderby|select|where|'
116
+ r'join|equals)\b', Keyword),
115
117
  (r'(global)(::)', bygroups(Keyword, Punctuation)),
116
- (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
117
- r'short|string|uint|ulong|ushort)\b\??', Keyword.Type),
118
+ (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
119
+ r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
118
120
  (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
119
121
  (r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
120
122
  (cs_ident, Name),
@@ -169,20 +171,20 @@ class NemerleLexer(RegexLexer):
169
171
 
170
172
  flags = re.MULTILINE | re.DOTALL | re.UNICODE
171
173
 
172
- # for the range of allowed unicode characters in identifiers,
173
- # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
174
-
175
- levels = {
176
- 'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
177
- 'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
178
- '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
179
- uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
180
- 'full': ('@?(?:_|[^' +
181
- _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl')) + '])'
182
- + '[^' + _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
183
- 'Nl', 'Nd', 'Pc', 'Cf', 'Mn',
184
- 'Mc')) + ']*'),
185
- }
174
+ # for the range of allowed unicode characters in identifiers, see
175
+ # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
176
+
177
+ levels = dict(
178
+ none = '@?[_a-zA-Z][a-zA-Z0-9_]*',
179
+ basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
180
+ '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
181
+ uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
182
+ full = ('@?(?:_|[^' + _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm',
183
+ 'Lo', 'Nl')) + '])'
184
+ + '[^' + _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
185
+ 'Nl', 'Nd', 'Pc', 'Cf', 'Mn',
186
+ 'Mc')) + ']*'),
187
+ )
186
188
 
187
189
  tokens = {}
188
190
  token_variants = True
@@ -199,7 +201,7 @@ class NemerleLexer(RegexLexer):
199
201
  (r'[^\S\n]+', Text),
200
202
  (r'\\\n', Text), # line continuation
201
203
  (r'//.*?\n', Comment.Single),
202
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
204
+ (r'/[*].*?[*]/', Comment.Multiline),
203
205
  (r'\n', Text),
204
206
  (r'\$\s*"', String, 'splice-string'),
205
207
  (r'\$\s*<#', String, 'splice-string2'),
@@ -209,15 +211,16 @@ class NemerleLexer(RegexLexer):
209
211
  (r'\]\>', Keyword),
210
212
 
211
213
  # quasiquotation only
212
- (r'\$' + cs_ident, Name),
213
- (r'(\$)(\()', bygroups(Name, Punctuation), 'splice-string-content'),
214
+ (r'\$' + cs_ident, Name),
215
+ (r'(\$)(\()', bygroups(Name, Punctuation),
216
+ 'splice-string-content'),
214
217
 
215
218
  (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
216
219
  (r'[{}]', Punctuation),
217
- (r'@"(\\\\|\\"|[^"])*"', String),
220
+ (r'@"(""|[^"])*"', String),
218
221
  (r'"(\\\\|\\"|[^"\n])*["\n]', String),
219
222
  (r"'\\.'|'[^\\]'", String.Char),
220
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
223
+ (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
221
224
  (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
222
225
  (r'#[ \t]*(if|endif|else|elif|define|undef|'
223
226
  r'line|error|warning|region|endregion|pragma)\b.*?\n',
@@ -256,7 +259,7 @@ class NemerleLexer(RegexLexer):
256
259
  ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
257
260
  ],
258
261
  'splice-string': [
259
- (r'[^"$]', String),
262
+ (r'[^"$]', String),
260
263
  (r'\$' + cs_ident, Name),
261
264
  (r'(\$)(\()', bygroups(Name, Punctuation),
262
265
  'splice-string-content'),
@@ -264,7 +267,7 @@ class NemerleLexer(RegexLexer):
264
267
  (r'"', String, '#pop')
265
268
  ],
266
269
  'splice-string2': [
267
- (r'[^#<>$]', String),
270
+ (r'[^#<>$]', String),
268
271
  (r'\$' + cs_ident, Name),
269
272
  (r'(\$)(\()', bygroups(Name, Punctuation),
270
273
  'splice-string-content'),
@@ -278,8 +281,9 @@ class NemerleLexer(RegexLexer):
278
281
  ],
279
282
  'splice-string-content': [
280
283
  (r'if|match', Keyword),
281
- (r'[~!%^&*+=|\[\]:;,.<>/?-]', Punctuation),
282
- (cs_ident, Name),
284
+ (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
285
+ (cs_ident, Name),
286
+ (r'\d+', Number),
283
287
  (r'\(', Punctuation, '#push'),
284
288
  (r'\)', Punctuation, '#pop')
285
289
  ]
@@ -335,10 +339,10 @@ class BooLexer(RegexLexer):
335
339
  r'matrix|max|min|normalArrayIndexing|print|property|range|'
336
340
  r'rawArrayIndexing|required|typeof|unchecked|using|'
337
341
  r'yieldAll|zip)\b', Name.Builtin),
338
- ('"""(\\\\|\\"|.*?)"""', String.Double),
339
- ('"(\\\\|\\"|[^"]*?)"', String.Double),
340
- ("'(\\\\|\\'|[^']*?)'", String.Single),
341
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
342
+ (r'"""(\\\\|\\"|.*?)"""', String.Double),
343
+ (r'"(\\\\|\\"|[^"]*?)"', String.Double),
344
+ (r"'(\\\\|\\'|[^']*?)'", String.Single),
345
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
342
346
  (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
343
347
  (r'[0-9][0-9\.]*(m|ms|d|h|s)', Number),
344
348
  (r'0\d+', Number.Oct),
@@ -461,6 +465,7 @@ class VbNetLexer(RegexLexer):
461
465
  ]
462
466
  }
463
467
 
468
+
464
469
  class GenericAspxLexer(RegexLexer):
465
470
  """
466
471
  Lexer for ASP.NET pages.
@@ -483,6 +488,7 @@ class GenericAspxLexer(RegexLexer):
483
488
  ],
484
489
  }
485
490
 
491
+
486
492
  #TODO support multiple languages within the same source file
487
493
  class CSharpAspxLexer(DelegatingLexer):
488
494
  """
@@ -503,7 +509,7 @@ class CSharpAspxLexer(DelegatingLexer):
503
509
  return 0.2
504
510
  elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
505
511
  return 0.15
506
- return 0.001 # TODO really only for when filename matched...
512
+
507
513
 
508
514
  class VbNetAspxLexer(DelegatingLexer):
509
515
  """
@@ -581,7 +587,7 @@ class FSharpLexer(RegexLexer):
581
587
  Name.Namespace, 'dotted'),
582
588
  (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
583
589
  (r'//.*?\n', Comment.Single),
584
- (r'\(\*', Comment, 'comment'),
590
+ (r'\(\*(?!\))', Comment, 'comment'),
585
591
  (r'\b(%s)\b' % '|'.join(keywords), Keyword),
586
592
  (r'(%s)' % '|'.join(keyopts), Operator),
587
593
  (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
@@ -5,7 +5,7 @@
5
5
 
6
6
  Lexers for functional languages.
7
7
 
8
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
8
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
9
9
  :license: BSD, see LICENSE for details.
10
10
  """
11
11
 
@@ -15,10 +15,10 @@ from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions
15
15
  from pygments.token import Text, Comment, Operator, Keyword, Name, \
16
16
  String, Number, Punctuation, Literal, Generic, Error
17
17
 
18
-
19
18
  __all__ = ['SchemeLexer', 'CommonLispLexer', 'HaskellLexer',
20
19
  'LiterateHaskellLexer', 'SMLLexer', 'OcamlLexer', 'ErlangLexer',
21
- 'ErlangShellLexer']
20
+ 'ErlangShellLexer', 'OpaLexer', 'CoqLexer', 'NewLispLexer',
21
+ 'ElixirLexer', 'ElixirConsoleLexer']
22
22
 
23
23
 
24
24
  class SchemeLexer(RegexLexer):
@@ -942,7 +942,7 @@ class ErlangLexer(RegexLexer):
942
942
 
943
943
  name = 'Erlang'
944
944
  aliases = ['erlang']
945
- filenames = ['*.erl', '*.hrl']
945
+ filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
946
946
  mimetypes = ['text/x-erlang']
947
947
 
948
948
  keywords = [
@@ -1079,3 +1079,722 @@ class ErlangShellLexer(Lexer):
1079
1079
  erlexer.get_tokens_unprocessed(curcode)):
1080
1080
  yield item
1081
1081
 
1082
+
1083
+ class OpaLexer(RegexLexer):
1084
+ """
1085
+ Lexer for the Opa language (http://opalang.org).
1086
+
1087
+ *New in Pygments 1.5.*
1088
+ """
1089
+
1090
+ name = 'Opa'
1091
+ aliases = ['opa']
1092
+ filenames = ['*.opa']
1093
+ mimetypes = ['text/x-opa']
1094
+
1095
+ # most of these aren't strictly keywords
1096
+ # but if you color only real keywords, you might just
1097
+ # as well not color anything
1098
+ keywords = [
1099
+ 'and', 'as', 'begin', 'css', 'database', 'db', 'do', 'else', 'end',
1100
+ 'external', 'forall', 'if', 'import', 'match', 'package', 'parser',
1101
+ 'rec', 'server', 'then', 'type', 'val', 'with', 'xml_parser'
1102
+ ]
1103
+
1104
+ # matches both stuff and `stuff`
1105
+ ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
1106
+
1107
+ op_re = r'[.=\-<>,@~%/+?*&^!]'
1108
+ punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
1109
+ # because they are also used for inserts
1110
+
1111
+ tokens = {
1112
+ # copied from the caml lexer, should be adapted
1113
+ 'escape-sequence': [
1114
+ (r'\\[\\\"\'ntr}]', String.Escape),
1115
+ (r'\\[0-9]{3}', String.Escape),
1116
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
1117
+ ],
1118
+
1119
+ # factorizing these rules, because they are inserted many times
1120
+ 'comments': [
1121
+ (r'/\*', Comment, 'nested-comment'),
1122
+ (r'//.*?$', Comment),
1123
+ ],
1124
+ 'comments-and-spaces': [
1125
+ include('comments'),
1126
+ (r'\s+', Text),
1127
+ ],
1128
+
1129
+ 'root': [
1130
+ include('comments-and-spaces'),
1131
+ # keywords
1132
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
1133
+ # directives
1134
+ # we could parse the actual set of directives instead of anything
1135
+ # starting with @, but this is troublesome
1136
+ # because it needs to be adjusted all the time
1137
+ # and assuming we parse only sources that compile, it is useless
1138
+ (r'@'+ident_re+r'\b', Name.Builtin.Pseudo),
1139
+
1140
+ # number literals
1141
+ (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
1142
+ (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
1143
+ (r'-?\d+[eE][+\-]?\d+', Number.Float),
1144
+ (r'0[xX][\da-fA-F]+', Number.Hex),
1145
+ (r'0[oO][0-7]+', Number.Oct),
1146
+ (r'0[bB][01]+', Number.Binary),
1147
+ (r'\d+', Number.Integer),
1148
+ # color literals
1149
+ (r'#[\da-fA-F]{3,6}', Number.Integer),
1150
+
1151
+ # string literals
1152
+ (r'"', String.Double, 'string'),
1153
+ # char literal, should be checked because this is the regexp from
1154
+ # the caml lexer
1155
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
1156
+ String.Char),
1157
+
1158
+ # this is meant to deal with embedded exprs in strings
1159
+ # every time we find a '}' we pop a state so that if we were
1160
+ # inside a string, we are back in the string state
1161
+ # as a consequence, we must also push a state every time we find a
1162
+ # '{' or else we will have errors when parsing {} for instance
1163
+ (r'{', Operator, '#push'),
1164
+ (r'}', Operator, '#pop'),
1165
+
1166
+ # html literals
1167
+ # this is a much more strict that the actual parser,
1168
+ # since a<b would not be parsed as html
1169
+ # but then again, the parser is way too lax, and we can't hope
1170
+ # to have something as tolerant
1171
+ (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
1172
+
1173
+ # db path
1174
+ # matching the '[_]' in '/a[_]' because it is a part
1175
+ # of the syntax of the db path definition
1176
+ # unfortunately, i don't know how to match the ']' in
1177
+ # /a[1], so this is somewhat inconsistent
1178
+ (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
1179
+ # putting the same color on <- as on db path, since
1180
+ # it can be used only to mean Db.write
1181
+ (r'<-(?!'+op_re+r')', Name.Variable),
1182
+
1183
+ # 'modules'
1184
+ # although modules are not distinguished by their names as in caml
1185
+ # the standard library seems to follow the convention that modules
1186
+ # only area capitalized
1187
+ (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
1188
+
1189
+ # operators
1190
+ # = has a special role because this is the only
1191
+ # way to syntactic distinguish binding constructions
1192
+ # unfortunately, this colors the equal in {x=2} too
1193
+ (r'=(?!'+op_re+r')', Keyword),
1194
+ (r'(%s)+' % op_re, Operator),
1195
+ (r'(%s)+' % punc_re, Operator),
1196
+
1197
+ # coercions
1198
+ (r':', Operator, 'type'),
1199
+ # type variables
1200
+ # we need this rule because we don't parse specially type
1201
+ # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
1202
+ ("'"+ident_re, Keyword.Type),
1203
+
1204
+ # id literal, #something, or #{expr}
1205
+ (r'#'+ident_re, String.Single),
1206
+ (r'#(?={)', String.Single),
1207
+
1208
+ # identifiers
1209
+ # this avoids to color '2' in 'a2' as an integer
1210
+ (ident_re, Text),
1211
+
1212
+ # default, not sure if that is needed or not
1213
+ # (r'.', Text),
1214
+ ],
1215
+
1216
+ # it is quite painful to have to parse types to know where they end
1217
+ # this is the general rule for a type
1218
+ # a type is either:
1219
+ # * -> ty
1220
+ # * type-with-slash
1221
+ # * type-with-slash -> ty
1222
+ # * type-with-slash (, type-with-slash)+ -> ty
1223
+ #
1224
+ # the code is pretty funky in here, but this code would roughly
1225
+ # translate in caml to:
1226
+ # let rec type stream =
1227
+ # match stream with
1228
+ # | [< "->"; stream >] -> type stream
1229
+ # | [< ""; stream >] ->
1230
+ # type_with_slash stream
1231
+ # type_lhs_1 stream;
1232
+ # and type_1 stream = ...
1233
+ 'type': [
1234
+ include('comments-and-spaces'),
1235
+ (r'->', Keyword.Type),
1236
+ (r'', Keyword.Type, ('#pop', 'type-lhs-1', 'type-with-slash')),
1237
+ ],
1238
+
1239
+ # parses all the atomic or closed constructions in the syntax of type
1240
+ # expressions: record types, tuple types, type constructors, basic type
1241
+ # and type variables
1242
+ 'type-1': [
1243
+ include('comments-and-spaces'),
1244
+ (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
1245
+ (r'~?{', Keyword.Type, ('#pop', 'type-record')),
1246
+ (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
1247
+ (ident_re, Keyword.Type, '#pop'),
1248
+ ("'"+ident_re, Keyword.Type),
1249
+ # this case is not in the syntax but sometimes
1250
+ # we think we are parsing types when in fact we are parsing
1251
+ # some css, so we just pop the states until we get back into
1252
+ # the root state
1253
+ (r'', Keyword.Type, '#pop'),
1254
+ ],
1255
+
1256
+ # type-with-slash is either:
1257
+ # * type-1
1258
+ # * type-1 (/ type-1)+
1259
+ 'type-with-slash': [
1260
+ include('comments-and-spaces'),
1261
+ (r'', Keyword.Type, ('#pop', 'slash-type-1', 'type-1')),
1262
+ ],
1263
+ 'slash-type-1': [
1264
+ include('comments-and-spaces'),
1265
+ ('/', Keyword.Type, ('#pop', 'type-1')),
1266
+ # same remark as above
1267
+ (r'', Keyword.Type, '#pop'),
1268
+ ],
1269
+
1270
+ # we go in this state after having parsed a type-with-slash
1271
+ # while trying to parse a type
1272
+ # and at this point we must determine if we are parsing an arrow
1273
+ # type (in which case we must continue parsing) or not (in which
1274
+ # case we stop)
1275
+ 'type-lhs-1': [
1276
+ include('comments-and-spaces'),
1277
+ (r'->', Keyword.Type, ('#pop', 'type')),
1278
+ (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
1279
+ (r'', Keyword.Type, '#pop'),
1280
+ ],
1281
+ 'type-arrow': [
1282
+ include('comments-and-spaces'),
1283
+ # the look ahead here allows to parse f(x : int, y : float -> truc)
1284
+ # correctly
1285
+ (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
1286
+ (r'->', Keyword.Type, ('#pop', 'type')),
1287
+ # same remark as above
1288
+ (r'', Keyword.Type, '#pop'),
1289
+ ],
1290
+
1291
+ # no need to do precise parsing for tuples and records
1292
+ # because they are closed constructions, so we can simply
1293
+ # find the closing delimiter
1294
+ # note that this function would be not work if the source
1295
+ # contained identifiers like `{)` (although it could be patched
1296
+ # to support it)
1297
+ 'type-tuple': [
1298
+ include('comments-and-spaces'),
1299
+ (r'[^\(\)/*]+', Keyword.Type),
1300
+ (r'[/*]', Keyword.Type),
1301
+ (r'\(', Keyword.Type, '#push'),
1302
+ (r'\)', Keyword.Type, '#pop'),
1303
+ ],
1304
+ 'type-record': [
1305
+ include('comments-and-spaces'),
1306
+ (r'[^{}/*]+', Keyword.Type),
1307
+ (r'[/*]', Keyword.Type),
1308
+ (r'{', Keyword.Type, '#push'),
1309
+ (r'}', Keyword.Type, '#pop'),
1310
+ ],
1311
+
1312
+ # 'type-tuple': [
1313
+ # include('comments-and-spaces'),
1314
+ # (r'\)', Keyword.Type, '#pop'),
1315
+ # (r'', Keyword.Type, ('#pop', 'type-tuple-1', 'type-1')),
1316
+ # ],
1317
+ # 'type-tuple-1': [
1318
+ # include('comments-and-spaces'),
1319
+ # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
1320
+ # (r',', Keyword.Type, 'type-1'),
1321
+ # ],
1322
+ # 'type-record':[
1323
+ # include('comments-and-spaces'),
1324
+ # (r'}', Keyword.Type, '#pop'),
1325
+ # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
1326
+ # ],
1327
+ # 'type-record-field-expr': [
1328
+ #
1329
+ # ],
1330
+
1331
+ 'nested-comment': [
1332
+ (r'[^/*]+', Comment),
1333
+ (r'/\*', Comment, '#push'),
1334
+ (r'\*/', Comment, '#pop'),
1335
+ (r'[/*]', Comment),
1336
+ ],
1337
+
1338
+ # the coy pasting between string and single-string
1339
+ # is kinda sad. Is there a way to avoid that??
1340
+ 'string': [
1341
+ (r'[^\\"{]+', String.Double),
1342
+ (r'"', String.Double, '#pop'),
1343
+ (r'{', Operator, 'root'),
1344
+ include('escape-sequence'),
1345
+ ],
1346
+ 'single-string': [
1347
+ (r'[^\\\'{]+', String.Double),
1348
+ (r'\'', String.Double, '#pop'),
1349
+ (r'{', Operator, 'root'),
1350
+ include('escape-sequence'),
1351
+ ],
1352
+
1353
+ # all the html stuff
1354
+ # can't really reuse some existing html parser
1355
+ # because we must be able to parse embedded expressions
1356
+
1357
+ # we are in this state after someone parsed the '<' that
1358
+ # started the html literal
1359
+ 'html-open-tag': [
1360
+ (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
1361
+ (r'>', String.Single, ('#pop', 'html-content')),
1362
+ ],
1363
+
1364
+ # we are in this state after someone parsed the '</' that
1365
+ # started the end of the closing tag
1366
+ 'html-end-tag': [
1367
+ # this is a star, because </> is allowed
1368
+ (r'[\w\-:]*>', String.Single, '#pop'),
1369
+ ],
1370
+
1371
+ # we are in this state after having parsed '<ident(:ident)?'
1372
+ # we thus parse a possibly empty list of attributes
1373
+ 'html-attr': [
1374
+ (r'\s+', Text),
1375
+ (r'[\w\-:]+=', String.Single, 'html-attr-value'),
1376
+ (r'/>', String.Single, '#pop'),
1377
+ (r'>', String.Single, ('#pop', 'html-content')),
1378
+ ],
1379
+
1380
+ 'html-attr-value': [
1381
+ (r"'", String.Single, ('#pop', 'single-string')),
1382
+ (r'"', String.Single, ('#pop', 'string')),
1383
+ (r'#'+ident_re, String.Single, '#pop'),
1384
+ (r'#(?={)', String.Single, ('#pop', 'root')),
1385
+ (r'{', Operator, ('#pop', 'root')), # this is a tail call!
1386
+ ],
1387
+
1388
+ # we should probably deal with '\' escapes here
1389
+ 'html-content': [
1390
+ (r'<!--', Comment, 'html-comment'),
1391
+ (r'</', String.Single, ('#pop', 'html-end-tag')),
1392
+ (r'<', String.Single, 'html-open-tag'),
1393
+ (r'{', Operator, 'root'),
1394
+ (r'.|\s+', String.Single),
1395
+ ],
1396
+
1397
+ 'html-comment': [
1398
+ (r'-->', Comment, '#pop'),
1399
+ (r'[^\-]+|-', Comment),
1400
+ ],
1401
+ }
1402
+
1403
+
1404
+ class CoqLexer(RegexLexer):
1405
+ """
1406
+ For the `Coq <http://coq.inria.fr/>`_ theorem prover.
1407
+
1408
+ *New in Pygments 1.5.*
1409
+ """
1410
+
1411
+ name = 'Coq'
1412
+ aliases = ['coq']
1413
+ filenames = ['*.v']
1414
+ mimetypes = ['text/x-coq']
1415
+
1416
+ keywords1 = [
1417
+ # Vernacular commands
1418
+ 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
1419
+ 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
1420
+ 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
1421
+ 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
1422
+ 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
1423
+ 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
1424
+ 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
1425
+ 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
1426
+ 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
1427
+ 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
1428
+ 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
1429
+ 'outside',
1430
+ ]
1431
+ keywords2 = [
1432
+ # Gallina
1433
+ 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
1434
+ 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
1435
+ 'for', 'of', 'nosimpl', 'with', 'as',
1436
+ ]
1437
+ keywords3 = [
1438
+ # Sorts
1439
+ 'Type', 'Prop',
1440
+ ]
1441
+ keywords4 = [
1442
+ # Tactics
1443
+ 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
1444
+ 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
1445
+ 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
1446
+ 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
1447
+ 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
1448
+ 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
1449
+ 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
1450
+ 'split', 'left', 'right', 'autorewrite',
1451
+ ]
1452
+ keywords5 = [
1453
+ # Terminators
1454
+ 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
1455
+ 'assumption', 'solve', 'contradiction', 'discriminate',
1456
+ ]
1457
+ keywords6 = [
1458
+ # Control
1459
+ 'do', 'last', 'first', 'try', 'idtac', 'repeat',
1460
+ ]
1461
+ # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
1462
+ # 'downto', 'else', 'end', 'exception', 'external', 'false',
1463
+ # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
1464
+ # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
1465
+ # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
1466
+ # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
1467
+ # 'type', 'val', 'virtual', 'when', 'while', 'with'
1468
+ keyopts = [
1469
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
1470
+ r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
1471
+ '<-', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
1472
+ r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>',
1473
+ r'/\\', r'\\/',
1474
+ u'Π', u'λ',
1475
+ ]
1476
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
1477
+ word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
1478
+ prefix_syms = r'[!?~]'
1479
+ infix_syms = r'[=<>@^|&+\*/$%-]'
1480
+ primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list',
1481
+ 'array']
1482
+
1483
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
            # "(* ... *)" comments; nesting is handled in the 'comment' state
            (r'\(\*', Comment, 'comment'),
            # keyword groups 1-6 are class attributes defined above; rule
            # order decides precedence when a word appears in several groups
            (r'\b(%s)\b' % '|'.join(keywords1), Keyword.Namespace),
            (r'\b(%s)\b' % '|'.join(keywords2), Keyword),
            (r'\b(%s)\b' % '|'.join(keywords3), Keyword.Type),
            (r'\b(%s)\b' % '|'.join(keywords4), Keyword),
            (r'\b(%s)\b' % '|'.join(keywords5), Keyword.Pseudo),
            (r'\b(%s)\b' % '|'.join(keywords6), Keyword.Reserved),
            # capitalized identifier followed by '.' starts a dotted
            # module path (continued in the 'dotted' state)
            (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
             Name.Namespace, 'dotted'),
            (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
            (r'(%s)' % '|'.join(keyopts), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),

            # lowercase / non-digit-initial identifiers
            (r"[^\W\d][\w']*", Name),

            # NOTE(review): the Integer rule precedes the Hex/Oct/Binary
            # rules, so e.g. "0x1F" matches Integer "0" first — presumably
            # the radix rules were meant to come first. Confirm upstream.
            (r'\d[\d_]*', Number.Integer),
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Binary),
            # NOTE(review): the '.' here is unescaped (matches any char) and
            # the exponent group is not optional — looks like it was meant to
            # be r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)?'. Confirm
            # against upstream Pygments before changing.
            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),

            # character literals: escape, octal or hex forms first
            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
             String.Char),
            (r"'.'", String.Char),
            (r"'", Keyword), # a stray quote is another syntax element

            (r'"', String.Double, 'string'),

            # labeled/optional arguments like ~x: and ?x:
            (r'[~?][a-z][\w\']*:', Name.Variable),
        ],
        # nestable "(* ... *)" comments
        'comment': [
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        # double-quoted strings; "" is an escaped quote
        'string': [
            (r'[^"]+', String.Double),
            (r'""', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        # continuation of a dotted module path entered from 'root'
        'dotted': [
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
            # empty match: pop back without consuming input
            (r'', Text, '#pop')
        ],
    }
1539
+
1540
+ def analyse_text(text):
1541
+ if text.startswith('(*'):
1542
+ return True
1543
+
1544
+
1545
class NewLispLexer(RegexLexer):
    """
    For `newLISP. <www.newlisp.org>`_ source code (version 10.3.0).

    *New in Pygments 1.5.*
    """

    name = 'NewLisp'
    aliases = ['newlisp']
    filenames = ['*.lsp', '*.nl']
    mimetypes = ['text/x-newlisp', 'application/x-newlisp']

    flags = re.IGNORECASE | re.MULTILINE | re.UNICODE

    # list of built-in functions for newLISP version 10.3
    # NOTE(review): the list contains duplicates (e.g. 'and', 'begin',
    # 'case', 'cond', 'let', 'letn', 'read', 'write') — harmless in the
    # joined regex alternation below, but could be deduplicated upstream.
    builtins = [
        '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
        '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
        '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
        '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
        'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file',
        'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
        'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
        'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin',
        'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case',
        'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
        'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant',
        'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
        'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
        'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
        'def-new', 'default', 'define-macro', 'define-macro', 'define',
        'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
        'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
        'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
        'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
        'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
        'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
        'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
        'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
        'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
        'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
        'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
        'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
        'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn',
        'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup',
        'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
        'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
        'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
        'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
        'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
        'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
        'net-send-to', 'net-send-udp', 'net-send', 'net-service',
        'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
        'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
        'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
        'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
        'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
        'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
        'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
        'read-key', 'read-line', 'read-utf8', 'read', 'reader-event',
        'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
        'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
        'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
        'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
        'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
        'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
        'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
        'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
        'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
        'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
        'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
        'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
        'write', 'write-char', 'write-file', 'write-line', 'write',
        'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
    ]

    # valid names: runs of symbol characters, or [bracketed] identifiers
    valid_name = r'([a-zA-Z0-9!$%&*+.,/<=>?@^_~|-])+|(\[.*?\])+'

    tokens = {
        'root': [
            # shebang
            (r'#!(.*?)$', Comment.Preproc),
            # comments starting with semicolon
            (r';.*$', Comment.Single),
            # comments starting with #
            (r'#.*$', Comment.Single),

            # whitespace
            (r'\s+', Text),

            # strings, symbols and characters
            (r'"(\\\\|\\"|[^"])*"', String),

            # braces
            (r"{", String, "bracestring"),

            # [text] ... [/text] delimited strings
            # NOTE(review): the '*' quantifies only '\]', so this also
            # matches "[text" with zero (or several) closing brackets —
            # presumably r'\[text\]' was intended. Confirm upstream.
            (r'\[text\]*', String, "tagstring"),

            # 'special' operators...
            (r"('|:)", Operator),

            # highlight the builtins
            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
             Keyword),

            # the remaining functions (anything right after an open paren)
            (r'(?<=\()' + valid_name, Name.Variable),

            # the remaining variables
            (valid_name, String.Symbol),

            # parentheses
            (r'(\(|\))', Punctuation),
        ],

        # braced strings... (braces nest via #push/#pop)
        'bracestring': [
            ("{", String, "#push"),
            ("}", String, "#pop"),
            ("[^{}]+", String),
        ],

        # tagged [text]...[/text] delimited strings...
        'tagstring': [
            (r'(?s)(.*?)(\[/text\])', String, '#pop'),
        ],
    }
1674
+
1675
+
1676
class ElixirLexer(RegexLexer):
    """
    For the `Elixir language <http://elixir-lang.org>`_.

    *New in Pygments 1.5.*
    """

    name = 'Elixir'
    aliases = ['elixir', 'ex', 'exs']
    filenames = ['*.ex', '*.exs']
    mimetypes = ['text/x-elixir']

    tokens = {
        'root': [
            (r'\s+', Text),
            # single-line comments
            (r'#.*$', Comment.Single),
            # core keywords; (?![?!]) avoids matching predicate-style names
            # such as "if?" / "raise!"
            (r'\b(case|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
             r'defp|def|defprotocol|defimpl|defrecord|defmacro|defdelegate|'
             r'defexception|exit|raise|throw)\b(?![?!])|'
             r'(?<!\.)\b(do|\-\>)\b\s*', Keyword),
            (r'\b(import|require|use|recur|quote|unquote|super)\b(?![?!])',
             Keyword.Namespace),
            (r'(?<!\.)\b(and|not|or|when|xor|in)\b', Operator.Word),
            # symbolic operators (compound forms listed before their prefixes)
            (r'%=|\*=|\*\*=|\+=|\-=|\^=|\|\|=|'
             r'<=>|<(?!<|=)|>(?!<|=|>)|<=|>=|===|==|=~|!=|!~|(?=[ \t])\?|'
             r'(?<=[ \t])!+|&&|\|\||\^|\*|\+|\-|/|'
             r'\||\+\+|\-\-|\*\*|\/\/|\<\-|\<\>|<<|>>|=|\.', Operator),
            # atoms: ":name", operator atoms, and trailing "key:" syntax
            (r'(?<!:)(:)([a-zA-Z_]\w*([?!]|=(?![>=]))?|\<\>|===?|>=?|<=?|'
             r'<=>|&&?|%\(\)|%\[\]|%\{\}|\+\+?|\-\-?|\|\|?|\!|//|[%&`/\|]|'
             r'\*\*?|=?~|<\-)|([a-zA-Z_]\w*([?!])?)(:)(?!:)', String.Symbol),
            # quoted atoms, possibly containing #{...} interpolation
            (r':"', String.Symbol, 'interpoling_symbol'),
            (r'\b(nil|true|false)\b(?![?!])|\b[A-Z]\w*\b', Name.Constant),
            (r'\b(__(FILE|LINE|MODULE|STOP_ITERATOR|EXCEPTION|OP|REF|FUNCTION|'
             r'BLOCK|KVBLOCK)__)\b(?![?!])', Name.Builtin.Pseudo),
            (r'[a-zA-Z_][\w_]*[\!\?]?', Name),
            (r'[(){};,/\|:\\\[\]]', Punctuation),
            # module attributes (@attr) and anonymous-fn args (&1, &2, ...)
            (r'@[a-zA-Z_]\w*|&\d', Name.Variable),
            # numbers: hex, decimal (with _ separators, fraction, exponent),
            # and binary literals
            # NOTE(review): '[:space:]' / '[:digit:]' are POSIX classes which
            # Python's re does not support — inside [...] they are read as
            # the literal characters '[', ':', 's', etc. Confirm intent
            # against upstream before changing.
            (r'\b(0[xX][0-9A-Fa-f]+|\d(_?\d)*(\.(?![^[:space:][:digit:]])'
             r'(_?\d)*)?([eE][-+]?\d(_?\d)*)?|0[bB][01]+)\b', Number),
            include('strings'),
        ],
        'strings': [
            (r'"""(?:.|\n)*?"""', String.Doc),
            (r"'''(?:.|\n)*?'''", String.Doc),
            (r'"', String.Double, 'dqs'),
            (r"'.*'", String.Single),
            # character literals such as ?a, ?\n, ?\x41
            # NOTE(review): '\h' is not a Python re escape (it matches a
            # literal 'h' here) — looks inherited from another regex
            # dialect; verify upstream.
            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
        ],
        # double-quoted strings with #{...} interpolation
        'dqs': [
            (r'"', String.Double, "#pop"),
            include('interpoling'),
            (r'[^#"]+', String.Double),
        ],
        # start of a #{...} interpolation, shared by 'dqs' and
        # 'interpoling_symbol'
        'interpoling': [
            (r'#{', String.Interpol, 'interpoling_string'),
        ],
        # inside #{...}: full expression syntax until the closing brace
        'interpoling_string' : [
            (r'}', String.Interpol, "#pop"),
            include('root')
        ],
        # quoted atom body entered from ':"'
        'interpoling_symbol': [
            (r'"', String.Symbol, "#pop"),
            include('interpoling'),
            (r'[^#"]+', String.Symbol),
        ],
    }
1743
+
1744
+
1745
class ElixirConsoleLexer(Lexer):
    """
    For Elixir interactive console (iex) output like:

    .. sourcecode:: iex

        iex> [head | tail] = [1,2,3]
        [1,2,3]
        iex> head
        1
        iex> tail
        [2,3]
        iex> [head | tail]
        [1,2,3]
        iex> length [head | tail]
        3

    *New in Pygments 1.5.*
    """

    name = 'Elixir iex session'
    aliases = ['iex']
    mimetypes = ['text/x-elixir-shellsession']

    _prompt_re = re.compile('(iex|\.{3})> ')

    def get_tokens_unprocessed(self, text):
        # Buffer consecutive prompt lines and re-lex them as one Elixir
        # snippet, merging the prompt/error tokens back in afterwards.
        exlexer = ElixirLexer(**self.options)

        code = ''
        insertions = []
        for match in line_re.finditer(text):
            line = match.group()
            # "** (SomeError) ..." lines are runtime errors printed by iex
            if line.startswith(u'** '):
                insertions.append((len(code),
                                   [(0, Generic.Error, line[:-1])]))
                code += line[-1:]
                continue
            m = self._prompt_re.match(line)
            if m is None:
                # Plain output line: flush any buffered code first,
                # then emit the line as generic output.
                if code:
                    for item in do_insertions(
                            insertions, exlexer.get_tokens_unprocessed(code)):
                        yield item
                    code = ''
                    insertions = []
                yield match.start(), Generic.Output, line
            else:
                # Prompt line: record the prompt token and buffer the
                # code that follows it for later lexing.
                end = m.end()
                insertions.append((len(code),
                                   [(0, Generic.Prompt, line[:end])]))
                code += line[end:]
        # Flush whatever code is still buffered at end of input.
        if code:
            for item in do_insertions(
                    insertions, exlexer.get_tokens_unprocessed(code)):
                yield item