pygmentize 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101) hide show
  1. data/LICENSE +19 -0
  2. data/lib/pygments.rb +23 -0
  3. data/pygmentize.gemspec +11 -0
  4. data/test/pygments.rb +19 -0
  5. data/vendor/pygmentize.py +7 -0
  6. data/vendor/pygments/AUTHORS +73 -0
  7. data/vendor/pygments/LICENSE +25 -0
  8. data/vendor/pygments/__init__.py +91 -0
  9. data/vendor/pygments/__init__.pyc +0 -0
  10. data/vendor/pygments/cmdline.py +430 -0
  11. data/vendor/pygments/cmdline.pyc +0 -0
  12. data/vendor/pygments/console.py +74 -0
  13. data/vendor/pygments/console.pyc +0 -0
  14. data/vendor/pygments/filter.py +74 -0
  15. data/vendor/pygments/filter.pyc +0 -0
  16. data/vendor/pygments/filters/__init__.py +357 -0
  17. data/vendor/pygments/filters/__init__.pyc +0 -0
  18. data/vendor/pygments/formatter.py +92 -0
  19. data/vendor/pygments/formatter.pyc +0 -0
  20. data/vendor/pygments/formatters/__init__.py +68 -0
  21. data/vendor/pygments/formatters/__init__.pyc +0 -0
  22. data/vendor/pygments/formatters/_mapping.py +92 -0
  23. data/vendor/pygments/formatters/_mapping.pyc +0 -0
  24. data/vendor/pygments/formatters/bbcode.py +109 -0
  25. data/vendor/pygments/formatters/bbcode.pyc +0 -0
  26. data/vendor/pygments/formatters/html.py +723 -0
  27. data/vendor/pygments/formatters/html.pyc +0 -0
  28. data/vendor/pygments/formatters/img.py +553 -0
  29. data/vendor/pygments/formatters/img.pyc +0 -0
  30. data/vendor/pygments/formatters/latex.py +354 -0
  31. data/vendor/pygments/formatters/latex.pyc +0 -0
  32. data/vendor/pygments/formatters/other.py +117 -0
  33. data/vendor/pygments/formatters/other.pyc +0 -0
  34. data/vendor/pygments/formatters/rtf.py +136 -0
  35. data/vendor/pygments/formatters/rtf.pyc +0 -0
  36. data/vendor/pygments/formatters/svg.py +154 -0
  37. data/vendor/pygments/formatters/svg.pyc +0 -0
  38. data/vendor/pygments/formatters/terminal.py +109 -0
  39. data/vendor/pygments/formatters/terminal.pyc +0 -0
  40. data/vendor/pygments/formatters/terminal256.py +219 -0
  41. data/vendor/pygments/formatters/terminal256.pyc +0 -0
  42. data/vendor/pygments/lexer.py +660 -0
  43. data/vendor/pygments/lexer.pyc +0 -0
  44. data/vendor/pygments/lexers/__init__.py +226 -0
  45. data/vendor/pygments/lexers/__init__.pyc +0 -0
  46. data/vendor/pygments/lexers/_asybuiltins.py +1645 -0
  47. data/vendor/pygments/lexers/_clbuiltins.py +232 -0
  48. data/vendor/pygments/lexers/_luabuiltins.py +256 -0
  49. data/vendor/pygments/lexers/_mapping.py +234 -0
  50. data/vendor/pygments/lexers/_mapping.pyc +0 -0
  51. data/vendor/pygments/lexers/_phpbuiltins.py +3389 -0
  52. data/vendor/pygments/lexers/_vimbuiltins.py +3 -0
  53. data/vendor/pygments/lexers/agile.py +1485 -0
  54. data/vendor/pygments/lexers/agile.pyc +0 -0
  55. data/vendor/pygments/lexers/asm.py +353 -0
  56. data/vendor/pygments/lexers/compiled.py +2365 -0
  57. data/vendor/pygments/lexers/dotnet.py +355 -0
  58. data/vendor/pygments/lexers/functional.py +756 -0
  59. data/vendor/pygments/lexers/functional.pyc +0 -0
  60. data/vendor/pygments/lexers/math.py +461 -0
  61. data/vendor/pygments/lexers/other.py +2297 -0
  62. data/vendor/pygments/lexers/parsers.py +695 -0
  63. data/vendor/pygments/lexers/special.py +100 -0
  64. data/vendor/pygments/lexers/special.pyc +0 -0
  65. data/vendor/pygments/lexers/templates.py +1387 -0
  66. data/vendor/pygments/lexers/text.py +1586 -0
  67. data/vendor/pygments/lexers/web.py +1619 -0
  68. data/vendor/pygments/lexers/web.pyc +0 -0
  69. data/vendor/pygments/plugin.py +74 -0
  70. data/vendor/pygments/plugin.pyc +0 -0
  71. data/vendor/pygments/scanner.py +104 -0
  72. data/vendor/pygments/style.py +117 -0
  73. data/vendor/pygments/style.pyc +0 -0
  74. data/vendor/pygments/styles/__init__.py +68 -0
  75. data/vendor/pygments/styles/__init__.pyc +0 -0
  76. data/vendor/pygments/styles/autumn.py +65 -0
  77. data/vendor/pygments/styles/borland.py +51 -0
  78. data/vendor/pygments/styles/bw.py +49 -0
  79. data/vendor/pygments/styles/colorful.py +81 -0
  80. data/vendor/pygments/styles/default.py +73 -0
  81. data/vendor/pygments/styles/default.pyc +0 -0
  82. data/vendor/pygments/styles/emacs.py +72 -0
  83. data/vendor/pygments/styles/friendly.py +72 -0
  84. data/vendor/pygments/styles/fruity.py +43 -0
  85. data/vendor/pygments/styles/manni.py +75 -0
  86. data/vendor/pygments/styles/monokai.py +106 -0
  87. data/vendor/pygments/styles/murphy.py +80 -0
  88. data/vendor/pygments/styles/native.py +65 -0
  89. data/vendor/pygments/styles/pastie.py +75 -0
  90. data/vendor/pygments/styles/perldoc.py +69 -0
  91. data/vendor/pygments/styles/tango.py +141 -0
  92. data/vendor/pygments/styles/trac.py +63 -0
  93. data/vendor/pygments/styles/vim.py +63 -0
  94. data/vendor/pygments/styles/vs.py +38 -0
  95. data/vendor/pygments/token.py +198 -0
  96. data/vendor/pygments/token.pyc +0 -0
  97. data/vendor/pygments/unistring.py +130 -0
  98. data/vendor/pygments/unistring.pyc +0 -0
  99. data/vendor/pygments/util.py +226 -0
  100. data/vendor/pygments/util.pyc +0 -0
  101. metadata +166 -0
@@ -0,0 +1,355 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.dotnet
4
+ ~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for .net languages.
7
+
8
+ :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+ import re
12
+
13
+ from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, using, this
14
+ from pygments.token import Punctuation, \
15
+ Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
16
+ from pygments.util import get_choice_opt
17
+ from pygments import unistring as uni
18
+
19
+ from pygments.lexers.web import XmlLexer
20
+
21
+ __all__ = ['CSharpLexer', 'BooLexer', 'VbNetLexer', 'CSharpAspxLexer',
22
+ 'VbNetAspxLexer']
23
+
24
+
25
+ def _escape(st):
26
+ return st.replace(u'\\', ur'\\').replace(u'-', ur'\-').\
27
+ replace(u'[', ur'\[').replace(u']', ur'\]')
28
+
29
class CSharpLexer(RegexLexer):
    """
    For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
    source code.

    Additional options accepted:

    `unicodelevel`
      Determines which Unicode characters this lexer allows for identifiers.
      The possible values are:

      * ``none`` -- only the ASCII letters and numbers are allowed. This
        is the fastest selection.
      * ``basic`` -- all Unicode characters from the specification except
        category ``Lo`` are allowed.
      * ``full`` -- all Unicode characters as specified in the C# specs
        are allowed.  Note that this means a considerable slowdown since the
        ``Lo`` category has more than 40,000 characters in it!

      The default value is ``basic``.

      *New in Pygments 0.8.*
    """

    name = 'C#'
    aliases = ['csharp', 'c#']
    filenames = ['*.cs']
    mimetypes = ['text/x-csharp']  # inferred

    flags = re.MULTILINE | re.DOTALL | re.UNICODE

    # for the range of allowed unicode characters in identifiers,
    # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf

    # One identifier regex per supported `unicodelevel` option value.
    levels = {
        'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
        'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
                  '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
                  uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
        'full': ('@?(?:_|[^' +
                 _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl')) + '])'
                 + '[^' + _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
                                                'Nl', 'Nd', 'Pc', 'Cf', 'Mn',
                                                'Mc')) + ']*'),
    }

    tokens = {}
    token_variants = True

    # Build one complete token table per unicode level; __init__ selects
    # (and lazily compiles) the table matching the requested option.
    for levelname, cs_ident in levels.items():
        tokens[levelname] = {
            'root': [
                # method names
                (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)'  # return type
                 r'(' + cs_ident + ')'                            # method name
                 r'(\s*)(\()',                                    # signature start
                 bygroups(using(this), Name.Function, Text, Punctuation)),
                (r'^\s*\[.*?\]', Name.Attribute),
                (r'[^\S\n]+', Text),
                (r'\\\n', Text),  # line continuation
                (r'//.*?\n', Comment.Single),
                (r'/[*](.|\n)*?[*]/', Comment.Multiline),
                (r'\n', Text),
                (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
                (r'[{}]', Punctuation),
                # verbatim strings (@"...") before regular strings
                (r'@"(\\\\|\\"|[^"])*"', String),
                (r'"(\\\\|\\"|[^"\n])*["\n]', String),
                (r"'\\.'|'[^\\]'", String.Char),
                (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
                 r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
                (r'#[ \t]*(if|endif|else|elif|define|undef|'
                 r'line|error|warning|region|endregion|pragma)\b.*?\n',
                 Comment.Preproc),
                (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
                 Keyword)),
                (r'(abstract|as|base|break|case|catch|'
                 r'checked|const|continue|default|delegate|'
                 r'do|else|enum|event|explicit|extern|false|finally|'
                 r'fixed|for|foreach|goto|if|implicit|in|interface|'
                 r'internal|is|lock|new|null|operator|'
                 r'out|override|params|private|protected|public|readonly|'
                 r'ref|return|sealed|sizeof|stackalloc|static|'
                 r'switch|this|throw|true|try|typeof|'
                 r'unchecked|unsafe|virtual|void|while|'
                 r'get|set|new|partial|yield|add|remove|value)\b', Keyword),
                (r'(global)(::)', bygroups(Keyword, Punctuation)),
                (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
                 r'short|string|uint|ulong|ushort)\b\??', Keyword.Type),
                (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
                (r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
                (cs_ident, Name),
            ],
            'class': [
                (cs_ident, Name.Class, '#pop')
            ],
            'namespace': [
                (r'(?=\()', Text, '#pop'),  # using (resource)
                ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
            ]
        }

    def __init__(self, **options):
        # Select the token table matching the `unicodelevel` option.
        level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(), 'basic')
        if level not in self._all_tokens:
            # compile the regexes now
            self._tokens = self.__class__.process_tokendef(level)
        else:
            self._tokens = self._all_tokens[level]

        RegexLexer.__init__(self, **options)
139
+
140
+
141
class BooLexer(RegexLexer):
    """
    For `Boo <http://boo.codehaus.org/>`_ source code.
    """

    name = 'Boo'
    aliases = ['boo']
    filenames = ['*.boo']
    mimetypes = ['text/x-boo']

    tokens = {
        'root': [
            (r'\s+', Text),
            # both '#' and '//' start single-line comments in Boo
            (r'(#|//).*$', Comment.Single),
            (r'/[*]', Comment.Multiline, 'comment'),
            (r'[]{}:(),.;[]', Punctuation),
            (r'\\\n', Text),
            (r'\\', Text),
            (r'(in|is|and|or|not)\b', Operator.Word),
            # regex literals: /.../ (no whitespace) and @/.../ (verbatim)
            (r'/(\\\\|\\/|[^/\s])/', String.Regex),
            (r'@/(\\\\|\\/|[^/])*/', String.Regex),
            (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
            (r'(as|abstract|callable|constructor|destructor|do|import|'
             r'enum|event|final|get|interface|internal|of|override|'
             r'partial|private|protected|public|return|set|static|'
             r'struct|transient|virtual|yield|super|and|break|cast|'
             r'continue|elif|else|ensure|except|for|given|goto|if|in|'
             r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
             r'while|from|as)\b', Keyword),
            # anonymous function: `def (args)`
            (r'def(?=\s+\(.*?\))', Keyword),
            (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
            (r'(namespace)(\s+)', bygroups(Keyword, Text), 'namespace'),
            (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
             r'assert|checked|enumerate|filter|getter|len|lock|map|'
             r'matrix|max|min|normalArrayIndexing|print|property|range|'
             r'rawArrayIndexing|required|typeof|unchecked|using|'
             r'yieldAll|zip)\b', Name.Builtin),
            ('"""(\\\\|\\"|.*?)"""', String.Double),
            ('"(\\\\|\\"|[^"]*?)"', String.Double),
            ("'(\\\\|\\'|[^']*?)'", String.Single),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
            (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
            # timespan literals (e.g. 10ms, 2d)
            (r'[0-9][0-9\.]*(m|ms|d|h|s)', Number),
            (r'0\d+', Number.Oct),
            (r'0x[a-fA-F0-9]+', Number.Hex),
            (r'\d+L', Number.Integer.Long),
            (r'\d+', Number.Integer),
        ],
        'comment': [
            # nested multi-line comments are supported via #push
            ('/[*]', Comment.Multiline, '#push'),
            ('[*]/', Comment.Multiline, '#pop'),
            ('[^/*]', Comment.Multiline),
            ('[*/]', Comment.Multiline)
        ],
        'funcname': [
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
        ],
        'classname': [
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
        ],
        'namespace': [
            ('[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
        ]
    }
206
+
207
+
208
class VbNetLexer(RegexLexer):
    """
    For
    `Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
    source code.
    """

    name = 'VB.net'
    aliases = ['vb.net', 'vbnet']
    filenames = ['*.vb', '*.bas']
    mimetypes = ['text/x-vbnet', 'text/x-vba']  # (?)

    # VB.NET keywords are case-insensitive
    flags = re.MULTILINE | re.IGNORECASE
    tokens = {
        'root': [
            # attributes, e.g. <Serializable()>
            (r'^\s*<.*?>', Name.Attribute),
            (r'\s+', Text),
            (r'\n', Text),
            (r'rem\b.*?\n', Comment),
            (r"'.*?\n", Comment),
            (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#End\s+If|#Const|'
             r'#ExternalSource.*?\n|#End\s+ExternalSource|'
             r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
             Comment.Preproc),
            (r'[\(\){}!#,.:]', Punctuation),
            (r'Option\s+(Strict|Explicit|Compare)\s+'
             r'(On|Off|Binary|Text)', Keyword.Declaration),
            (r'(?<!\.)(AddHandler|Alias|'
             r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
             r'CDec|CDbl|CInt|CLng|CObj|Const|Continue|CSByte|CShort|'
             r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
             r'Default|Delegate|Dim|DirectCast|Do|Each|Else|ElseIf|'
             r'End|EndIf|Enum|Erase|Error|Event|Exit|False|Finally|For|'
             r'Friend|Function|Get|Global|GoSub|GoTo|Handles|If|'
             r'Implements|Imports|Inherits|Interface|'
             r'Let|Lib|Loop|Me|Module|MustInherit|'
             r'MustOverride|MyBase|MyClass|Namespace|Narrowing|New|Next|'
             r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
             r'Operator|Option|Optional|Overloads|Overridable|'
             r'Overrides|ParamArray|Partial|Private|Property|Protected|'
             r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
             r'Return|Select|Set|Shadows|Shared|Single|'
             r'Static|Step|Stop|Structure|Sub|SyncLock|Then|'
             r'Throw|To|True|Try|TryCast|Wend|'
             r'Using|When|While|Widening|With|WithEvents|'
             r'WriteOnly)\b', Keyword),
            (r'(?<!\.)(Function|Sub|Property)(\s+)',
             bygroups(Keyword, Text), 'funcname'),
            (r'(?<!\.)(Class|Structure|Enum)(\s+)',
             bygroups(Keyword, Text), 'classname'),
            (r'(?<!\.)(Namespace|Imports)(\s+)',
             bygroups(Keyword, Text), 'namespace'),
            (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
             r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
             r'UShort)\b', Keyword.Type),
            (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
             r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
            (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
             r'<=|>=|<>|[-&*/\\^+=<>]',
             Operator),
            ('"', String, 'string'),
            # identifiers may carry a legacy type-suffix character
            ('[a-zA-Z_][a-zA-Z0-9_]*[%&@!#$]?', Name),
            # date literals, e.g. #1/1/2000#
            ('#.*?#', Literal.Date),
            (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
            (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
            (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
            (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
            (r'_\n', Text),  # Line continuation
        ],
        'string': [
            # doubled quote is an escaped quote inside a VB string
            (r'""', String),
            (r'"C?', String, '#pop'),
            (r'[^"]+', String),
        ],
        'funcname': [
            (r'[a-z_][a-z0-9_]*', Name.Function, '#pop')
        ],
        'classname': [
            (r'[a-z_][a-z0-9_]*', Name.Class, '#pop')
        ],
        'namespace': [
            (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop')
        ],
    }
292
+
293
class GenericAspxLexer(RegexLexer):
    """
    Lexer for ASP.NET pages.

    Splits a page into XML markup and embedded code (``<%...%>`` blocks
    and ``<script>`` bodies); the code is emitted as ``Other`` tokens so
    a :class:`DelegatingLexer` subclass can hand it to a language lexer.
    """

    name = 'aspx-gen'
    filenames = []
    mimetypes = []

    flags = re.DOTALL

    tokens = {
        'root': [
            # <% ... %>, <%= ... %>, <%@ ... %>, <%# ... %> code blocks
            (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
            (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
                                                        Other,
                                                        using(XmlLexer))),
            # everything else is plain XML
            (r'(.+?)(?=<)', using(XmlLexer)),
            (r'.+', using(XmlLexer)),
        ],
    }
314
+
315
# TODO: support multiple languages within the same source file
class CSharpAspxLexer(DelegatingLexer):
    """
    Lexer for highlighting C# within ASP.NET pages.
    """

    name = 'aspx-cs'
    aliases = ['aspx-cs']
    filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
    mimetypes = []

    def __init__(self, **options):
        # delegate code portions to the C# lexer, markup to the ASPX lexer
        super(CSharpAspxLexer, self).__init__(CSharpLexer,GenericAspxLexer,
                                              **options)

    def analyse_text(text):
        # NOTE: pygments calls analyse_text unbound, hence no `self`
        if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
            return 0.2
        elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
            return 0.15
        return 0.001  # TODO really only for when filename matched...
336
+
337
class VbNetAspxLexer(DelegatingLexer):
    """
    Lexer for highlighting Visual Basic.NET within ASP.NET pages.
    """

    name = 'aspx-vb'
    aliases = ['aspx-vb']
    filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
    mimetypes = []

    def __init__(self, **options):
        # delegate code portions to the VB.NET lexer, markup to the ASPX lexer
        super(VbNetAspxLexer, self).__init__(VbNetLexer,GenericAspxLexer,
                                             **options)

    def analyse_text(text):
        # NOTE: pygments calls analyse_text unbound, hence no `self`
        if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
            return 0.2
        elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
            return 0.15
@@ -0,0 +1,756 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.functional
4
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for functional languages.
7
+
8
+ :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+
14
+ from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions
15
+ from pygments.token import Text, Comment, Operator, Keyword, Name, \
16
+ String, Number, Punctuation, Literal, Generic
17
+
18
+
19
+ __all__ = ['SchemeLexer', 'CommonLispLexer', 'HaskellLexer', 'LiterateHaskellLexer',
20
+ 'OcamlLexer', 'ErlangLexer', 'ErlangShellLexer']
21
+
22
+
23
class SchemeLexer(RegexLexer):
    """
    A Scheme lexer, parsing a stream and outputting the tokens
    needed to highlight scheme code.
    This lexer could be most probably easily subclassed to parse
    other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp.

    This parser is checked with pastes from the LISP pastebin
    at http://paste.lisp.org/ to cover as much syntax as possible.

    It supports the full Scheme syntax as defined in R5RS.

    *New in Pygments 0.6.*
    """
    name = 'Scheme'
    aliases = ['scheme', 'scm']
    filenames = ['*.scm']
    mimetypes = ['text/x-scheme', 'application/x-scheme']

    # list of known keywords and builtins taken from vim 6.4 scheme.vim
    # syntax file.
    keywords = [
        'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
        'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
        'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
        'let-syntax', 'letrec-syntax', 'syntax-rules'
    ]
    builtins = [
        '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
        'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
        'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
        'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
        'cadr', 'call-with-current-continuation', 'call-with-input-file',
        'call-with-output-file', 'call-with-values', 'call/cc', 'car',
        'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
        'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
        'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
        'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
        'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
        'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
        'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
        'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
        'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
        'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
        'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
        'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
        'integer?', 'interaction-environment', 'lcm', 'length', 'list',
        'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
        'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
        'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
        'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
        'null?', 'number->string', 'number?', 'numerator', 'odd?',
        'open-input-file', 'open-output-file', 'output-port?', 'pair?',
        'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
        'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
        'remainder', 'reverse', 'round', 'scheme-report-environment',
        'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
        'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
        'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
        'string-copy', 'string-fill!', 'string-length', 'string-ref',
        'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
        'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
        'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
        'vector', 'vector->list', 'vector-fill!', 'vector-length',
        'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
        'with-output-to-file', 'write', 'write-char', 'zero?'
    ]

    # valid names for identifiers
    # well, names can only not consist fully of numbers
    # but this should be good enough for now
    valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'

    tokens = {
        'root' : [
            # the comments - always starting with semicolon
            # and going to the end of the line
            (r';.*$', Comment.Single),

            # whitespaces - usually not relevant
            (r'\s+', Text),

            # numbers
            (r'-?\d+\.\d+', Number.Float),
            (r'-?\d+', Number.Integer),
            # support for uncommon kinds of numbers -
            # have to figure out what the characters mean
            #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),

            # strings, symbols and characters
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'" + valid_name, String.Symbol),
            (r"#\\([()/'\".'_!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),

            # constants
            (r'(#t|#f)', Name.Constant),

            # special operators
            (r"('|#|`|,@|,|\.)", Operator),

            # highlight the keywords
            ('(%s)' % '|'.join([
                re.escape(entry) + ' ' for entry in keywords]),
                Keyword
            ),

            # first variable in a quoted string like
            # '(this is syntactic sugar)
            (r"(?<='\()" + valid_name, Name.Variable),
            (r"(?<=#\()" + valid_name, Name.Variable),

            # highlight the builtins
            ("(?<=\()(%s)" % '|'.join([
                re.escape(entry) + ' ' for entry in builtins]),
                Name.Builtin
            ),

            # the remaining functions
            (r'(?<=\()' + valid_name, Name.Function),
            # find the remaining variables
            (valid_name, Name.Variable),

            # the famous parentheses!
            (r'(\(|\))', Punctuation),
        ],
    }
149
+
150
+
151
class CommonLispLexer(RegexLexer):
    """
    A Common Lisp lexer.

    *New in Pygments 0.9.*
    """
    name = 'Common Lisp'
    aliases = ['common-lisp', 'cl']
    filenames = ['*.cl', '*.lisp', '*.el']  # use for Elisp too
    mimetypes = ['text/x-common-lisp']

    flags = re.IGNORECASE | re.MULTILINE

    ### couple of useful regexes

    # characters that are not macro-characters and can be used to begin a symbol
    nonmacro = r'\\.|[a-zA-Z0-9!$%&*+-/<=>?@\[\]^_{}~]'
    constituent = nonmacro + '|[#.:]'
    terminated = r'(?=[ "()\'\n,;`])'  # whitespace or terminating macro characters

    ### symbol token, reverse-engineered from hyperspec
    # Take a deep breath...
    symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)

    def __init__(self, **options):
        # Import the (large) builtin-name tables lazily, only when a
        # lexer instance is actually constructed.
        from pygments.lexers._clbuiltins import BUILTIN_FUNCTIONS, \
            SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
            BUILTIN_TYPES, BUILTIN_CLASSES
        self.builtin_function = BUILTIN_FUNCTIONS
        self.special_forms = SPECIAL_FORMS
        self.macros = MACROS
        self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
        self.declarations = DECLARATIONS
        self.builtin_types = BUILTIN_TYPES
        self.builtin_classes = BUILTIN_CLASSES
        RegexLexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # Post-process generic Name.Variable tokens into more specific
        # token types by looking the value up in the builtin tables.
        stack = ['root']
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
            if token is Name.Variable:
                if value in self.builtin_function:
                    yield index, Name.Builtin, value
                    continue
                if value in self.special_forms:
                    yield index, Keyword, value
                    continue
                if value in self.macros:
                    yield index, Name.Builtin, value
                    continue
                if value in self.lambda_list_keywords:
                    yield index, Keyword, value
                    continue
                if value in self.declarations:
                    yield index, Keyword, value
                    continue
                if value in self.builtin_types:
                    yield index, Keyword.Type, value
                    continue
                if value in self.builtin_classes:
                    yield index, Name.Class, value
                    continue
            yield index, token, value

    tokens = {
        'root' : [
            ('', Text, 'body'),
        ],
        'multiline-comment' : [
            (r'#\|', Comment.Multiline, '#push'),  # (cf. Hyperspec 2.4.8.19)
            (r'\|#', Comment.Multiline, '#pop'),
            (r'[^|#]+', Comment.Multiline),
            (r'[|#]', Comment.Multiline),
        ],
        'commented-form' : [
            (r'\(', Comment.Preproc, '#push'),
            (r'\)', Comment.Preproc, '#pop'),
            (r'[^()]+', Comment.Preproc),
        ],
        'body' : [
            # whitespace
            (r'\s+', Text),

            # single-line comment
            (r';.*$', Comment.Single),

            # multi-line comment
            (r'#\|', Comment.Multiline, 'multiline-comment'),

            # encoding comment (?)
            (r'#\d*Y.*$', Comment.Special),

            # strings and characters
            (r'"(\\.|[^"\\])*"', String),
            # quoting
            (r":" + symbol, String.Symbol),
            (r"'" + symbol, String.Symbol),
            (r"'", Operator),
            (r"`", Operator),

            # decimal numbers
            (r'[-+]?\d+\.?' + terminated, Number.Integer),
            (r'[-+]?\d+/\d+' + terminated, Number),
            (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' \
                + terminated, Number.Float),

            # sharpsign strings and characters
            (r"#\\." + terminated, String.Char),
            (r"#\\" + symbol, String.Char),

            # vector
            (r'#\(', Operator, 'body'),

            # bitstring
            (r'#\d*\*[01]*', Literal.Other),

            # uninterned symbol
            (r'#:' + symbol, String.Symbol),

            # read-time and load-time evaluation
            (r'#[.,]', Operator),

            # function shorthand
            (r'#\'', Name.Function),

            # binary rational
            (r'#[bB][+-]?[01]+(/[01]+)?', Number),

            # octal rational
            (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),

            # hex rational
            (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),

            # radix rational
            (r'#\d+[rR][+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),

            # complex
            (r'(#[cC])(\()', bygroups(Number, Punctuation), 'body'),

            # array
            (r'(#\d+[aA])(\()', bygroups(Literal.Other, Punctuation), 'body'),

            # structure
            (r'(#[sS])(\()', bygroups(Literal.Other, Punctuation), 'body'),

            # path
            (r'#[pP]?"(\\.|[^"])*"', Literal.Other),

            # reference
            (r'#\d+=', Operator),
            (r'#\d+#', Operator),

            # read-time comment
            (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'),

            # read-time conditional
            (r'#[+-]', Operator),

            # special operators that should have been parsed already
            (r'(,@|,|\.)', Operator),

            # special constants
            (r'(t|nil)' + terminated, Name.Constant),

            # functions and variables
            (r'\*' + symbol + '\*', Name.Variable.Global),
            (symbol, Name.Variable),

            # parentheses
            (r'\(', Punctuation, 'body'),
            (r'\)', Punctuation, '#pop'),
        ],
    }
325
+
326
+
327
class HaskellLexer(RegexLexer):
    """
    A Haskell lexer based on the lexemes defined in the Haskell 98 Report.

    *New in Pygments 0.8.*
    """
    name = 'Haskell'
    aliases = ['haskell', 'hs']
    filenames = ['*.hs']
    mimetypes = ['text/x-haskell']

    reserved = ['case','class','data','default','deriving','do','else',
                'if','in','infix[lr]?','instance',
                'let','newtype','of','then','type','where','_']
    # named ASCII control-character escapes allowed in char/string literals
    ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK',
             'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE',
             'DC[1-4]','NAK','SYN','ETB','CAN',
             'EM','SUB','ESC','[FGRU]S','SP','DEL']

    tokens = {
        'root': [
            # Whitespace:
            (r'\s+', Text),
            #(r'--\s*|.*$', Comment.Doc),
            (r'--(?![!#$%&*+./<=>?@\^|_~]).*?$', Comment.Single),
            (r'{-', Comment.Multiline, 'comment'),
            # Lexemes:
            #  Identifiers
            (r'\bimport\b', Keyword.Reserved, 'import'),
            (r'\bmodule\b', Keyword.Reserved, 'module'),
            (r'\berror\b', Name.Exception),
            (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
            (r'^[_a-z][\w\']*', Name.Function),
            (r'[_a-z][\w\']*', Name),
            (r'[A-Z][\w\']*', Keyword.Type),
            #  Operators
            (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function),  # lambda operator
            (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word),  # specials
            (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type),  # Constructor operators
            (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),  # Other operators
            #  Numbers
            (r'\d+[eE][+-]?\d+', Number.Float),
            (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
            (r'0[oO][0-7]+', Number.Oct),
            (r'0[xX][\da-fA-F]+', Number.Hex),
            (r'\d+', Number.Integer),
            #  Character/String Literals
            (r"'", String.Char, 'character'),
            (r'"', String, 'string'),
            #  Special
            (r'\[\]', Keyword.Type),
            (r'\(\)', Name.Builtin),
            (r'[][(),;`{}]', Punctuation),
        ],
        'import': [
            # Import statements
            (r'\s+', Text),
            # after "funclist" state
            (r'\)', Punctuation, '#pop'),
            (r'qualified\b', Keyword),
            # import X as Y
            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(as)(\s+)([A-Z][a-zA-Z0-9_.]*)',
             bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
            # import X hiding (functions)
            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(hiding)(\s+)(\()',
             bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
            # import X (functions)
            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
             bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
            # import X
            (r'[a-zA-Z0-9_.]+', Name.Namespace, '#pop'),
        ],
        'module': [
            (r'\s+', Text),
            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
             bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
            (r'[A-Z][a-zA-Z0-9_.]*', Name.Namespace, '#pop'),
        ],
        'funclist': [
            (r'\s+', Text),
            (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
            (r'[_a-z][\w\']+', Name.Function),
            (r'--.*$', Comment.Single),
            (r'{-', Comment.Multiline, 'comment'),
            (r',', Punctuation),
            (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
            # (HACK, but it makes sense to push two instances, believe me)
            (r'\(', Punctuation, ('funclist', 'funclist')),
            (r'\)', Punctuation, '#pop:2'),
        ],
        'comment': [
            # Multiline Comments
            (r'[^-{}]+', Comment.Multiline),
            (r'{-', Comment.Multiline, '#push'),
            (r'-}', Comment.Multiline, '#pop'),
            (r'[-{}]', Comment.Multiline),
        ],
        'character': [
            # Allows multi-chars, incorrectly.
            (r"[^\\']", String.Char),
            (r"\\", String.Escape, 'escape'),
            ("'", String.Char, '#pop'),
        ],
        'string': [
            (r'[^\\"]+', String),
            (r"\\", String.Escape, 'escape'),
            ('"', String, '#pop'),
        ],
        'escape': [
            (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
            (r'\^[][A-Z@\^_]', String.Escape, '#pop'),
            ('|'.join(ascii), String.Escape, '#pop'),
            (r'o[0-7]+', String.Escape, '#pop'),
            (r'x[\da-fA-F]+', String.Escape, '#pop'),
            (r'\d+', String.Escape, '#pop'),
            # string gap: backslash, whitespace, backslash
            (r'\s+\\', String.Escape, '#pop'),
        ],
    }
445
+
446
+
447
# Matches one line of input including its trailing newline; the
# line-oriented lexers below use it to walk the source line by line.
line_re = re.compile('.*?\n')
# Bird-style literate Haskell marker: group 1 is the leading ">" (plus
# following blanks), group 2 is the actual code on the line.
bird_re = re.compile(r'(>[ \t]*)(.*\n)')
450
class LiterateHaskellLexer(Lexer):
    """
    For Literate Haskell (Bird-style or LaTeX) source.

    Additional options accepted:

    `litstyle`
        If given, must be ``"bird"`` or ``"latex"``. If not given, the style
        is autodetected: if the first non-whitespace character in the source
        is a backslash or percent character, LaTeX is assumed, else Bird.

    *New in Pygments 0.9.*
    """
    name = 'Literate Haskell'
    aliases = ['lhs', 'literate-haskell']
    filenames = ['*.lhs']
    mimetypes = ['text/x-literate-haskell']

    def get_tokens_unprocessed(self, text):
        hslexer = HaskellLexer(**self.options)

        style = self.options.get('litstyle')
        if style is None:
            # Autodetect: LaTeX documents start with "\" or "%".
            # (Conditional expression replaces the fragile
            # "cond and a or b" idiom used previously.)
            style = 'latex' if text.lstrip()[0:1] in '%\\' else 'bird'

        code = ''
        insertions = []
        if style == 'bird':
            # Bird style: "> ..." lines are code, everything else is text.
            for match in line_re.finditer(text):
                line = match.group()
                m = bird_re.match(line)
                if m:
                    insertions.append((len(code),
                                       [(0, Comment.Special, m.group(1))]))
                    code += m.group(2)
                else:
                    insertions.append((len(code), [(0, Text, line)]))
        else:
            # LaTeX style: code lives inside \begin{code}..\end{code}
            # environments; the surrounding text is lexed as TeX.
            from pygments.lexers.text import TexLexer
            lxlexer = TexLexer(**self.options)

            codelines = 0
            latex = ''
            for match in line_re.finditer(text):
                line = match.group()
                if codelines:
                    if line.lstrip().startswith('\\end{code}'):
                        codelines = 0
                        latex += line
                    else:
                        code += line
                elif line.lstrip().startswith('\\begin{code}'):
                    codelines = 1
                    latex += line
                    insertions.append((len(code),
                                       list(lxlexer.get_tokens_unprocessed(latex))))
                    latex = ''
                else:
                    latex += line
            # Flush trailing TeX after the last code block (do_insertions
            # copes with an empty final insertion).
            insertions.append((len(code),
                               list(lxlexer.get_tokens_unprocessed(latex))))
        for item in do_insertions(insertions, hslexer.get_tokens_unprocessed(code)):
            yield item
515
+
516
+
517
class OcamlLexer(RegexLexer):
    """
    For the OCaml language.

    *New in Pygments 0.7.*
    """

    name = 'OCaml'
    aliases = ['ocaml']
    filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
    mimetypes = ['text/x-ocaml']

    keywords = [
        'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
        'downto', 'else', 'end', 'exception', 'external', 'false',
        'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
        'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
        'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
        'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
        'type', 'val', 'virtual', 'when', 'while', 'with'
    ]
    keyopts = [
        '!=','#','&','&&','\(','\)','\*','\+',',','-',
        '-\.','->','\.','\.\.',':','::',':=',':>',';',';;','<',
        '<-','=','>','>]','>}','\?','\?\?','\[','\[<','\[>','\[\|',
        ']','_','`','{','{<','\|','\|]','}','~'
    ]

    operators = r'[!$%&*+\./:<=>?@^|~-]'
    word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
    prefix_syms = r'[!?~]'
    infix_syms = r'[=<>@^|&+\*/$%-]'
    primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array']

    tokens = {
        'escape-sequence': [
            (r'\\[\"\'ntbr]', String.Escape),
            (r'\\[0-9]{3}', String.Escape),
            (r'\\x[0-9a-fA-F]{2}', String.Escape),
        ],
        'root': [
            (r'\s+', Text),
            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
            (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
             Name.Namespace, 'dotted'),
            (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
            (r'\(\*', Comment, 'comment'),
            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
            (r'(%s)' % '|'.join(keyopts), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),

            (r"[^\W\d][\w']*", Name),

            # FIX: float/hex/oct/binary rules must come BEFORE the plain
            # integer rule — RegexLexer tries rules in order, and the old
            # ordering let r'\d[\d_]*' consume the leading digits so the
            # other number rules could never match (e.g. 0x1A lexed as
            # Integer "0" + Name "x1A").  The float's dot is now escaped
            # (it previously matched any character) and a float requires a
            # fractional part, an exponent, or both.
            (r'-?\d[\d_]*(\.[\d_]*([eE][+\-]?\d[\d_]*)?|[eE][+\-]?\d[\d_]*)',
             Number.Float),
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Binary),
            (r'\d[\d_]*', Number.Integer),

            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
             String.Char),
            (r"'.'", String.Char),
            (r"'", Keyword), # a stray quote is another syntax element

            (r'"', String.Double, 'string'),

            (r'[~?][a-z][\w\']*:', Name.Variable),
        ],
        'comment': [
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'string': [
            (r'[^\\"]+', String.Double),
            include('escape-sequence'),
            (r'\\\n', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'dotted': [
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
        ],
    }
607
+
608
+
609
class ErlangLexer(RegexLexer):
    """
    For the Erlang functional programming language.

    Blame Jeremy Thurgood (http://jerith.za.net/).

    *New in Pygments 0.9.*
    """

    name = 'Erlang'
    aliases = ['erlang']
    filenames = ['*.erl', '*.hrl']
    mimetypes = ['text/x-erlang']

    keywords = [
        'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
        'let', 'of', 'query', 'receive', 'try', 'when',
    ]

    builtins = [ # See erlang(3) man page
        'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
        'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
        'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
        'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
        'float', 'float_to_list', 'fun_info', 'fun_to_list',
        'function_exported', 'garbage_collect', 'get', 'get_keys',
        'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
        'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
        'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
        'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
        'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
        'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
        'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
        'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
        'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
        'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
        'pid_to_list', 'port_close', 'port_command', 'port_connect',
        'port_control', 'port_call', 'port_info', 'port_to_list',
        'process_display', 'process_flag', 'process_info', 'purge_module',
        'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
        'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
        'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
        'spawn_opt', 'split_binary', 'start_timer', 'statistics',
        'suspend_process', 'system_flag', 'system_info', 'system_monitor',
        'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
        'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
        'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
    ]

    operators = r'(\+|-|\*|/|<|>|=|==|/=|=:=|=/=|=<|>=|\+\+|--|<-|!)'
    word_operators = [
        'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
        'div', 'not', 'or', 'orelse', 'rem', 'xor'
    ]

    # Unquoted atom or single-quoted atom (quoted form must not end in "\'").
    atom_re = r"(?:[a-z][a-zA-Z0-9_]*|'[^\n']*[^\\]')"

    variable_re = r'(?:[A-Z_][a-zA-Z0-9_]*)'

    escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))'

    macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'

    # Radix prefix for based integers: 2#..36#.
    base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'%.*\n', Comment),
            ('(' + '|'.join(keywords) + r')\b', Keyword),
            ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
            ('(' + '|'.join(word_operators) + r')\b', Operator.Word),
            (r'^-', Punctuation, 'directive'),
            (operators, Operator),
            (r'"', String, 'string'),
            (r'<<', Name.Label),
            (r'>>', Name.Label),
            (r'('+atom_re+')(:)', bygroups(Name.Namespace, Punctuation)),
            (r'^('+atom_re+r')(\s*)(\()', bygroups(Name.Function, Text, Punctuation)),
            (r'[+-]?'+base_re+r'#[0-9a-zA-Z]+', Number.Integer),
            # FIX: the float rule must come before the plain-integer rule
            # (rules are tried in order, so r'[+-]?\d+' used to consume the
            # digits before the decimal point, leaving the float rule
            # unreachable), and the decimal point must be escaped — an
            # unescaped "." matched any character.
            (r'[+-]?\d+\.\d+', Number.Float),
            (r'[+-]?\d+', Number.Integer),
            (r'[][:_@\".{}()|;,]', Punctuation),
            (variable_re, Name.Variable),
            (atom_re, Name),
            (r'\?'+macro_re, Name.Constant),
            (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
            (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
        ],
        'string': [
            (escape_re, String.Escape),
            (r'"', String, '#pop'),
            (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol),
            (r'[^"\\~]+', String),
            (r'~', String),
        ],
        'directive': [
            (r'(define)(\s*)(\()('+macro_re+r')',
             bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
            (r'(record)(\s*)(\()('+macro_re+r')',
             bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
            (atom_re, Name.Entity, '#pop'),
        ],
    }
713
+
714
+
715
class ErlangShellLexer(Lexer):
    """
    Shell sessions in erl (for Erlang code).

    *New in Pygments 1.1.*
    """
    name = 'Erlang erl session'
    aliases = ['erl']
    filenames = ['*.erl-sh']
    mimetypes = ['text/x-erl-shellsession']

    # An erl prompt looks like "3> " (digits, ">", then whitespace or EOF).
    _prompt_re = re.compile(r'\d+>(?=\s|\Z)')

    def get_tokens_unprocessed(self, text):
        # Code entered at prompts is buffered up and lexed as Erlang;
        # every other line is emitted as shell output (or a traceback
        # when it starts with "*").
        erlexer = ErlangLexer(**self.options)

        pending = ''
        prompt_tokens = []
        for line_match in line_re.finditer(text):
            raw = line_match.group()
            prompt = self._prompt_re.match(raw)
            if prompt is not None:
                cut = prompt.end()
                prompt_tokens.append((len(pending),
                                      [(0, Generic.Prompt, raw[:cut])]))
                pending += raw[cut:]
                continue
            # Non-prompt line: flush any buffered code first so token
            # ordering matches the input.
            if pending:
                for tok in do_insertions(prompt_tokens,
                                         erlexer.get_tokens_unprocessed(pending)):
                    yield tok
                pending = ''
                prompt_tokens = []
            if raw.startswith('*'):
                yield line_match.start(), Generic.Traceback, raw
            else:
                yield line_match.start(), Generic.Output, raw
        if pending:
            for tok in do_insertions(prompt_tokens,
                                     erlexer.get_tokens_unprocessed(pending)):
                yield tok
756
+