pygments.rb 0.2.4 → 0.2.6

Files changed (120)
  1. data/lib/pygments/version.rb +1 -1
  2. data/vendor/pygments-main/AUTHORS +14 -0
  3. data/vendor/pygments-main/CHANGES +34 -3
  4. data/vendor/pygments-main/Makefile +1 -1
  5. data/vendor/pygments-main/docs/generate.py +1 -1
  6. data/vendor/pygments-main/external/markdown-processor.py +1 -1
  7. data/vendor/pygments-main/external/moin-parser.py +1 -1
  8. data/vendor/pygments-main/external/rst-directive-old.py +1 -1
  9. data/vendor/pygments-main/external/rst-directive.py +1 -1
  10. data/vendor/pygments-main/pygments/__init__.py +1 -1
  11. data/vendor/pygments-main/pygments/cmdline.py +4 -1
  12. data/vendor/pygments-main/pygments/console.py +1 -1
  13. data/vendor/pygments-main/pygments/filter.py +1 -1
  14. data/vendor/pygments-main/pygments/filters/__init__.py +1 -1
  15. data/vendor/pygments-main/pygments/formatter.py +1 -1
  16. data/vendor/pygments-main/pygments/formatters/__init__.py +1 -1
  17. data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -1
  18. data/vendor/pygments-main/pygments/formatters/bbcode.py +1 -1
  19. data/vendor/pygments-main/pygments/formatters/html.py +2 -2
  20. data/vendor/pygments-main/pygments/formatters/img.py +1 -1
  21. data/vendor/pygments-main/pygments/formatters/latex.py +3 -2
  22. data/vendor/pygments-main/pygments/formatters/other.py +1 -1
  23. data/vendor/pygments-main/pygments/formatters/rtf.py +1 -1
  24. data/vendor/pygments-main/pygments/formatters/svg.py +1 -1
  25. data/vendor/pygments-main/pygments/formatters/terminal.py +5 -2
  26. data/vendor/pygments-main/pygments/formatters/terminal256.py +5 -2
  27. data/vendor/pygments-main/pygments/lexer.py +29 -10
  28. data/vendor/pygments-main/pygments/lexers/__init__.py +14 -11
  29. data/vendor/pygments-main/pygments/lexers/_asybuiltins.py +1 -1
  30. data/vendor/pygments-main/pygments/lexers/_clbuiltins.py +1 -1
  31. data/vendor/pygments-main/pygments/lexers/_luabuiltins.py +1 -1
  32. data/vendor/pygments-main/pygments/lexers/_mapping.py +41 -23
  33. data/vendor/pygments-main/pygments/lexers/_phpbuiltins.py +1 -1
  34. data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +1 -1
  35. data/vendor/pygments-main/pygments/lexers/_scilab_builtins.py +29 -0
  36. data/vendor/pygments-main/pygments/lexers/_vimbuiltins.py +3 -3
  37. data/vendor/pygments-main/pygments/lexers/agile.py +148 -443
  38. data/vendor/pygments-main/pygments/lexers/asm.py +5 -3
  39. data/vendor/pygments-main/pygments/lexers/compiled.py +298 -294
  40. data/vendor/pygments-main/pygments/lexers/dotnet.py +40 -34
  41. data/vendor/pygments-main/pygments/lexers/functional.py +723 -4
  42. data/vendor/pygments-main/pygments/lexers/hdl.py +228 -6
  43. data/vendor/pygments-main/pygments/lexers/jvm.py +678 -0
  44. data/vendor/pygments-main/pygments/lexers/math.py +65 -2
  45. data/vendor/pygments-main/pygments/lexers/other.py +875 -481
  46. data/vendor/pygments-main/pygments/lexers/parsers.py +1 -1
  47. data/vendor/pygments-main/pygments/lexers/shell.py +360 -0
  48. data/vendor/pygments-main/pygments/lexers/special.py +1 -1
  49. data/vendor/pygments-main/pygments/lexers/sql.py +565 -0
  50. data/vendor/pygments-main/pygments/lexers/templates.py +1 -1
  51. data/vendor/pygments-main/pygments/lexers/text.py +237 -100
  52. data/vendor/pygments-main/pygments/lexers/web.py +146 -10
  53. data/vendor/pygments-main/pygments/plugin.py +1 -1
  54. data/vendor/pygments-main/pygments/scanner.py +1 -1
  55. data/vendor/pygments-main/pygments/style.py +1 -1
  56. data/vendor/pygments-main/pygments/styles/__init__.py +2 -1
  57. data/vendor/pygments-main/pygments/styles/autumn.py +1 -1
  58. data/vendor/pygments-main/pygments/styles/borland.py +1 -1
  59. data/vendor/pygments-main/pygments/styles/bw.py +1 -1
  60. data/vendor/pygments-main/pygments/styles/colorful.py +1 -1
  61. data/vendor/pygments-main/pygments/styles/default.py +1 -1
  62. data/vendor/pygments-main/pygments/styles/emacs.py +1 -1
  63. data/vendor/pygments-main/pygments/styles/friendly.py +1 -1
  64. data/vendor/pygments-main/pygments/styles/fruity.py +1 -2
  65. data/vendor/pygments-main/pygments/styles/manni.py +1 -1
  66. data/vendor/pygments-main/pygments/styles/monokai.py +1 -1
  67. data/vendor/pygments-main/pygments/styles/murphy.py +1 -1
  68. data/vendor/pygments-main/pygments/styles/native.py +1 -1
  69. data/vendor/pygments-main/pygments/styles/pastie.py +1 -1
  70. data/vendor/pygments-main/pygments/styles/perldoc.py +1 -1
  71. data/vendor/pygments-main/pygments/styles/rrt.py +33 -0
  72. data/vendor/pygments-main/pygments/styles/tango.py +1 -1
  73. data/vendor/pygments-main/pygments/styles/trac.py +1 -1
  74. data/vendor/pygments-main/pygments/styles/vim.py +1 -1
  75. data/vendor/pygments-main/pygments/styles/vs.py +1 -1
  76. data/vendor/pygments-main/pygments/token.py +1 -1
  77. data/vendor/pygments-main/pygments/unistring.py +1 -1
  78. data/vendor/pygments-main/pygments/util.py +2 -2
  79. data/vendor/pygments-main/scripts/check_sources.py +2 -2
  80. data/vendor/pygments-main/scripts/find_codetags.py +1 -1
  81. data/vendor/pygments-main/scripts/find_error.py +5 -2
  82. data/vendor/pygments-main/scripts/get_vimkw.py +9 -4
  83. data/vendor/pygments-main/setup.py +1 -1
  84. data/vendor/pygments-main/tests/examplefiles/classes.dylan +16 -0
  85. data/vendor/pygments-main/tests/examplefiles/coq_RelationClasses +447 -0
  86. data/vendor/pygments-main/tests/examplefiles/example.cls +15 -0
  87. data/vendor/pygments-main/tests/examplefiles/example.moon +629 -0
  88. data/vendor/pygments-main/tests/examplefiles/example.p +34 -0
  89. data/vendor/pygments-main/tests/examplefiles/example.snobol +15 -0
  90. data/vendor/pygments-main/tests/examplefiles/example.u +548 -0
  91. data/vendor/pygments-main/tests/examplefiles/example_elixir.ex +363 -0
  92. data/vendor/pygments-main/tests/examplefiles/foo.sce +6 -0
  93. data/vendor/pygments-main/tests/examplefiles/http_request_example +14 -0
  94. data/vendor/pygments-main/tests/examplefiles/http_response_example +27 -0
  95. data/vendor/pygments-main/tests/examplefiles/irc.lsp +214 -0
  96. data/vendor/pygments-main/tests/examplefiles/markdown.lsp +679 -0
  97. data/vendor/pygments-main/tests/examplefiles/nemerle_sample.n +4 -2
  98. data/vendor/pygments-main/tests/examplefiles/reversi.lsp +427 -0
  99. data/vendor/pygments-main/tests/examplefiles/scilab.sci +30 -0
  100. data/vendor/pygments-main/tests/examplefiles/test.bro +250 -0
  101. data/vendor/pygments-main/tests/examplefiles/test.cs +23 -0
  102. data/vendor/pygments-main/tests/examplefiles/test.dart +23 -0
  103. data/vendor/pygments-main/tests/examplefiles/test.ecl +58 -0
  104. data/vendor/pygments-main/tests/examplefiles/test.fan +818 -0
  105. data/vendor/pygments-main/tests/examplefiles/test.ps1 +108 -0
  106. data/vendor/pygments-main/tests/examplefiles/test.vhdl +161 -0
  107. data/vendor/pygments-main/tests/old_run.py +1 -1
  108. data/vendor/pygments-main/tests/run.py +1 -1
  109. data/vendor/pygments-main/tests/test_basic_api.py +4 -3
  110. data/vendor/pygments-main/tests/test_clexer.py +1 -1
  111. data/vendor/pygments-main/tests/test_cmdline.py +1 -1
  112. data/vendor/pygments-main/tests/test_examplefiles.py +4 -3
  113. data/vendor/pygments-main/tests/test_html_formatter.py +33 -1
  114. data/vendor/pygments-main/tests/test_latex_formatter.py +1 -1
  115. data/vendor/pygments-main/tests/test_perllexer.py +137 -0
  116. data/vendor/pygments-main/tests/test_regexlexer.py +1 -1
  117. data/vendor/pygments-main/tests/test_token.py +1 -1
  118. data/vendor/pygments-main/tests/test_using_api.py +1 -1
  119. data/vendor/pygments-main/tests/test_util.py +35 -5
  120. metadata +30 -4
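
Most of this release is a refresh of the vendored pygments-main checkout: the shell and SQL lexers gain dedicated modules (shell.py and sql.py, shown in full below), a new jvm.py module appears, and the copyright headers move to 2012. The short sketch below is not part of the diff; it is one way to smoke-test the refreshed vendor directory from Python, assuming data/vendor/pygments-main has been put on sys.path.

    # Hypothetical smoke test against the vendored checkout (not part of the
    # diff); the 'bash' and 'postgresql' aliases are registered in
    # pygments/lexers/_mapping.py, which this release also regenerates.
    import sys
    sys.path.insert(0, 'data/vendor/pygments-main')

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    for alias, sample in [('bash', 'echo "hi" && ls $HOME'),
                          ('postgresql', 'SELECT 1 AS answer;  -- comment')]:
        lexer = get_lexer_by_name(alias)
        print(highlight(sample, lexer, HtmlFormatter()))
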
data/vendor/pygments-main/pygments/lexers/parsers.py
@@ -5,7 +5,7 @@
 
     Lexers for parser generators.
 
-    :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
data/vendor/pygments-main/pygments/lexers/shell.py (new file)
@@ -0,0 +1,360 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.shell
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for various shells.
+
+    :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
+from pygments.token import Punctuation, \
+     Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import shebang_matches
+
+
+__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
+           'PowerShellLexer']
+
+line_re = re.compile('.*?\n')
+
+
+class BashLexer(RegexLexer):
+    """
+    Lexer for (ba|k|)sh shell scripts.
+
+    *New in Pygments 0.6.*
+    """
+
+    name = 'Bash'
+    aliases = ['bash', 'sh', 'ksh']
+    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass']
+    mimetypes = ['application/x-sh', 'application/x-shellscript']
+
+    tokens = {
+        'root': [
+            include('basic'),
+            (r'\$\(\(', Keyword, 'math'),
+            (r'\$\(', Keyword, 'paren'),
+            (r'\${#?', Keyword, 'curly'),
+            (r'`', String.Backtick, 'backticks'),
+            include('data'),
+        ],
+        'basic': [
+            (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
+             r'select|continue|until|esac|elif)\s*\b',
+             Keyword),
+            (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
+             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
+             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
+             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
+             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
+             r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
+             Name.Builtin),
+            (r'#.*\n', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+            (r'[\[\]{}()=]', Operator),
+            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+            (r'&&|\|\|', Operator),
+        ],
+        'data': [
+            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+            (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r';', Text),
+            (r'\s+', Text),
+            (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'\$#?(\w+|.)', Name.Variable),
+            (r'<', Text),
+        ],
+        'curly': [
+            (r'}', Keyword, '#pop'),
+            (r':-', Keyword),
+            (r'[a-zA-Z0-9_]+', Name.Variable),
+            (r'[^}:"\'`$]+', Punctuation),
+            (r':', Punctuation),
+            include('root'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'math': [
+            (r'\)\)', Keyword, '#pop'),
+            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+            (r'\d+', Number),
+            include('root'),
+        ],
+        'backticks': [
+            (r'`', String.Backtick, '#pop'),
+            include('root'),
+        ],
+    }
+
+    def analyse_text(text):
+        return shebang_matches(text, r'(ba|z|)sh')
+
+
+class BashSessionLexer(Lexer):
+    """
+    Lexer for simplistic shell sessions.
+
+    *New in Pygments 1.1.*
+    """
+
+    name = 'Bash Session'
+    aliases = ['console']
+    filenames = ['*.sh-session']
+    mimetypes = ['application/x-shell-session']
+
+    def get_tokens_unprocessed(self, text):
+        bashlexer = BashLexer(**self.options)
+
+        pos = 0
+        curcode = ''
+        insertions = []
+
+        for match in line_re.finditer(text):
+            line = match.group()
+            m = re.match(r'^((?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)?|\[\S+[@:]'
+                         r'[^\n]+\].+)[$#%])(.*\n?)', line)
+            if m:
+                # To support output lexers (say diff output), the output
+                # needs to be broken by prompts whenever the output lexer
+                # changes.
+                if not insertions:
+                    pos = match.start()
+
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, m.group(1))]))
+                curcode += m.group(2)
+            elif line.startswith('>'):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:1])]))
+                curcode += line[1:]
+            else:
+                if insertions:
+                    toks = bashlexer.get_tokens_unprocessed(curcode)
+                    for i, t, v in do_insertions(insertions, toks):
+                        yield pos+i, t, v
+                yield match.start(), Generic.Output, line
+                insertions = []
+                curcode = ''
+        if insertions:
+            for i, t, v in do_insertions(insertions,
+                                         bashlexer.get_tokens_unprocessed(curcode)):
+                yield pos+i, t, v
+
+
+class BatchLexer(RegexLexer):
+    """
+    Lexer for the DOS/Windows Batch file format.
+
+    *New in Pygments 0.7.*
+    """
+    name = 'Batchfile'
+    aliases = ['bat']
+    filenames = ['*.bat', '*.cmd']
+    mimetypes = ['application/x-dos-batch']
+
+    flags = re.MULTILINE | re.IGNORECASE
+
+    tokens = {
+        'root': [
+            # Lines can start with @ to prevent echo
+            (r'^\s*@', Punctuation),
+            (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
+            (r'".*?"', String.Double),
+            (r"'.*?'", String.Single),
+            # If made more specific, make sure you still allow expansions
+            # like %~$VAR:zlt
+            (r'%%?[~$:\w]+%?', Name.Variable),
+            (r'::.*', Comment), # Technically :: only works at BOL
+            (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
+            (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
+            (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
+            (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
+             r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
+             r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
+            (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
+            include('basic'),
+            (r'.', Text),
+        ],
+        'echo': [
+            # Escapes only valid within echo args?
+            (r'\^\^|\^<|\^>|\^\|', String.Escape),
+            (r'\n', Text, '#pop'),
+            include('basic'),
+            (r'[^\'"^]+', Text),
+        ],
+        'basic': [
+            (r'".*?"', String.Double),
+            (r"'.*?'", String.Single),
+            (r'`.*?`', String.Backtick),
+            (r'-?\d+', Number),
+            (r',', Punctuation),
+            (r'=', Operator),
+            (r'/\S+', Name),
+            (r':\w+', Name.Label),
+            (r'\w:\w+', Text),
+            (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
+        ],
+    }
+
+
+class TcshLexer(RegexLexer):
+    """
+    Lexer for tcsh scripts.
+
+    *New in Pygments 0.10.*
+    """
+
+    name = 'Tcsh'
+    aliases = ['tcsh', 'csh']
+    filenames = ['*.tcsh', '*.csh']
+    mimetypes = ['application/x-csh']
+
+    tokens = {
+        'root': [
+            include('basic'),
+            (r'\$\(', Keyword, 'paren'),
+            (r'\${#?', Keyword, 'curly'),
+            (r'`', String.Backtick, 'backticks'),
+            include('data'),
+        ],
+        'basic': [
+            (r'\b(if|endif|else|while|then|foreach|case|default|'
+             r'continue|goto|breaksw|end|switch|endsw)\s*\b',
+             Keyword),
+            (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
+             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
+             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
+             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
+             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
+             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
+             r'source|stop|suspend|source|suspend|telltc|time|'
+             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
+             r'ver|wait|warp|watchlog|where|which)\s*\b',
+             Name.Builtin),
+            (r'#.*\n', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+            (r'[\[\]{}()=]+', Operator),
+            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+        ],
+        'data': [
+            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r'\s+', Text),
+            (r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'\$#?(\w+|.)', Name.Variable),
+        ],
+        'curly': [
+            (r'}', Keyword, '#pop'),
+            (r':-', Keyword),
+            (r'[a-zA-Z0-9_]+', Name.Variable),
+            (r'[^}:"\'`$]+', Punctuation),
+            (r':', Punctuation),
+            include('root'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'backticks': [
+            (r'`', String.Backtick, '#pop'),
+            include('root'),
+        ],
+    }
+
+
+class PowerShellLexer(RegexLexer):
+    """
+    For Windows PowerShell code.
+
+    *New in Pygments 1.5.*
+    """
+    name = 'PowerShell'
+    aliases = ['powershell', 'posh', 'ps1']
+    filenames = ['*.ps1']
+    mimetypes = ['text/x-powershell']
+
+    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
+
+    keywords = (
+        'while validateset validaterange validatepattern validatelength '
+        'validatecount until trap switch return ref process param parameter in '
+        'if global: function foreach for finally filter end elseif else '
+        'dynamicparam do default continue cmdletbinding break begin alias \\? '
+        '% #script #private #local #global mandatory parametersetname position '
+        'valuefrompipeline valuefrompipelinebypropertyname '
+        'valuefromremainingarguments helpmessage try catch').split()
+
+    operators = (
+        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
+        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
+        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
+        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
+        'lt match ne not notcontains notlike notmatch or regex replace '
+        'wildcard').split()
+
+    verbs = (
+        'write where wait use update unregister undo trace test tee take '
+        'suspend stop start split sort skip show set send select scroll resume '
+        'restore restart resolve resize reset rename remove register receive '
+        'read push pop ping out new move measure limit join invoke import '
+        'group get format foreach export expand exit enter enable disconnect '
+        'disable debug cxnew copy convertto convertfrom convert connect '
+        'complete compare clear checkpoint aggregate add').split()
+
+    commenthelp = (
+        'component description example externalhelp forwardhelpcategory '
+        'forwardhelptargetname forwardhelptargetname functionality inputs link '
+        'notes outputs parameter remotehelprunspace role synopsis').split()
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
+             bygroups(Comment, String.Doc, Comment)),
+            (r'#[^\n]*?$', Comment),
+            (r'(&lt;|<)#', Comment.Multiline, 'multline'),
+            (r'@"\n.*?\n"@', String.Heredoc),
+            (r"@'\n.*?\n'@", String.Heredoc),
+            (r'"', String.Double, 'string'),
+            (r"'([^']|'')*'", String.Single),
+            (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
+             Name.Variable),
+            (r'(%s)\b' % '|'.join(keywords), Keyword),
+            (r'-(%s)\b' % '|'.join(operators), Operator),
+            (r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
+            (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant),  # .net [type]s
+            (r'-[a-z_][a-z0-9_]*', Name),
+            (r'\w+', Name),
+            (r'[.,{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
+        ],
+        'multline': [
+            (r'[^#&.]+', Comment.Multiline),
+            (r'#(>|&gt;)', Comment.Multiline, '#pop'),
+            (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
+            (r'[#&.]', Comment.Multiline),
+        ],
+        'string': [
+            (r'[^$`"]+', String.Double),
+            (r'\$\(', String.Interpol, 'interpol'),
+            (r'`"|""', String.Double),
+            (r'[`$]', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+        'interpol': [
+            (r'[^$)]+', String.Interpol),
+            (r'\$\(', String.Interpol, '#push'),
+            (r'\)', String.Interpol, '#pop'),
+        ]
+    }
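
The new shell.py collects BashLexer, BashSessionLexer, BatchLexer, TcshLexer and the PowerShellLexer (new in Pygments 1.5) in one module. Since BashLexer.analyse_text() checks the shebang via shebang_matches, lexer guessing should pick Bash for plain scripts; a small sketch, not taken from the diff:

    from pygments.lexers import guess_lexer
    from pygments.lexers.shell import BashLexer

    script = '#!/bin/bash\nfor f in *.txt; do echo "$f"; done\n'

    # analyse_text() above matches the shebang, so guessing should land on Bash.
    print(guess_lexer(script).name)

    # Token stream produced by the 'root'/'basic'/'data' states defined above.
    for ttype, value in BashLexer().get_tokens(script):
        print('%s %r' % (ttype, value))
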
data/vendor/pygments-main/pygments/lexers/special.py
@@ -5,7 +5,7 @@
 
     Special lexers.
 
-    :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
data/vendor/pygments-main/pygments/lexers/sql.py (new file)
@@ -0,0 +1,565 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.sql
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for various SQL dialects and related interactive sessions.
+
+    Postgres specific lexers:
+
+    `PostgresLexer`
+        A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
+        lexer are:
+
+        - keywords and data types list parsed from the PG docs (run the
+          `_postgres_builtins` module to update them);
+        - Content of $-strings parsed using a specific lexer, e.g. the content
+          of a PL/Python function is parsed using the Python lexer;
+        - parse PG specific constructs: E-strings, $-strings, U&-strings,
+          different operators and punctuation.
+
+    `PlPgsqlLexer`
+        A lexer for the PL/pgSQL language. Adds a few specific construct on
+        top of the PG SQL lexer (such as <<label>>).
+
+    `PostgresConsoleLexer`
+        A lexer to highlight an interactive psql session:
+
+        - identifies the prompt and does its best to detect the end of command
+          in multiline statement where not all the lines are prefixed by a
+          prompt, telling them apart from the output;
+        - highlights errors in the output and notification levels;
+        - handles psql backslash commands.
+
+    The ``tests/examplefiles`` contains a few test files with data to be
+    parsed by these lexers.
+
+    :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+from copy import deepcopy
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
+from pygments.token import Punctuation, \
+     Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.lexers import get_lexer_by_name, ClassNotFound
+
+from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
+     PSEUDO_TYPES, PLPGSQL_KEYWORDS
+
+
+__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
+           'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer']
+
+line_re = re.compile('.*?\n')
+
+language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
+
+def language_callback(lexer, match):
+    """Parse the content of a $-string using a lexer
+
+    The lexer is chosen looking for a nearby LANGUAGE.
+
+    Note: this function should have been a `PostgresBase` method, but the
+    rules deepcopy fails in this case.
+    """
+    l = None
+    m = language_re.match(lexer.text[match.end():match.end()+100])
+    if m is not None:
+        l = lexer._get_lexer(m.group(1))
+    else:
+        m = list(language_re.finditer(
+            lexer.text[max(0, match.start()-100):match.start()]))
+        if m:
+            l = lexer._get_lexer(m[-1].group(1))
+
+    if l:
+        yield (match.start(1), String, match.group(1))
+        for x in l.get_tokens_unprocessed(match.group(2)):
+            yield x
+        yield (match.start(3), String, match.group(3))
+
+    else:
+        yield (match.start(), String, match.group())
+
+
+class PostgresBase(object):
+    """Base class for Postgres-related lexers.
+
+    This is implemented as a mixin to avoid the Lexer metaclass kicking in.
+    this way the different lexer don't have a common Lexer ancestor. If they
+    had, _tokens could be created on this ancestor and not updated for the
+    other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
+    seem to suggest that regexp lexers are not really subclassable.
+
+    `language_callback` should really be our method, but this breaks deepcopy.
+    """
+    def get_tokens_unprocessed(self, text, *args):
+        # Have a copy of the entire text to be used by `language_callback`.
+        self.text = text
+        for x in super(PostgresBase, self).get_tokens_unprocessed(
+                text, *args):
+            yield x
+
+    def _get_lexer(self, lang):
+        if lang.lower() == 'sql':
+            return get_lexer_by_name('postgresql', **self.options)
+
+        tries = [ lang ]
+        if lang.startswith('pl'):
+            tries.append(lang[2:])
+        if lang.endswith('u'):
+            tries.append(lang[:-1])
+        if lang.startswith('pl') and lang.endswith('u'):
+            tries.append(lang[2:-1])
+
+        for l in tries:
+            try:
+                return get_lexer_by_name(l, **self.options)
+            except ClassNotFound:
+                pass
+        else:
+            # TODO: better logging
+            # print >>sys.stderr, "language not found:", lang
+            return None
+
+
+class PostgresLexer(PostgresBase, RegexLexer):
+    """
+    Lexer for the PostgreSQL dialect of SQL.
+
+    *New in Pygments 1.5.*
+    """
+
+    name = 'PostgreSQL SQL dialect'
+    aliases = ['postgresql', 'postgres']
+    mimetypes = ['text/x-postgresql']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'--.*?\n', Comment.Single),
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'(' + '|'.join([s.replace(" ", "\s+")
+                for s in DATATYPES + PSEUDO_TYPES])
+                  + r')\b', Name.Builtin),
+            (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
+            (r'[+*/<>=~!@#%^&|`?^-]+', Operator),
+            (r'::', Operator),  # cast
+            (r'\$\d+', Name.Variable),
+            (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r"(E|U&)?'(''|[^'])*'", String.Single),
+            (r'(U&)?"(""|[^"])*"', String.Name),  # quoted identifier
+            (r'(?ms)(\$[^\$]*\$)(.*?)(\1)', language_callback),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+
+            # psql variable in SQL
+            (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
+
+            (r'[;:()\[\]\{\},\.]', Punctuation),
+        ],
+        'multiline-comments': [
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[^/\*]+', Comment.Multiline),
+            (r'[/*]', Comment.Multiline)
+        ],
+    }
+
+
+class PlPgsqlLexer(PostgresBase, RegexLexer):
+    """
+    Handle the extra syntax in Pl/pgSQL language.
+
+    *New in Pygments 1.5.*
+    """
+    name = 'PL/pgSQL'
+    aliases = ['plpgsql']
+    mimetypes = ['text/x-plpgsql']
+
+    flags = re.IGNORECASE
+    tokens = deepcopy(PostgresLexer.tokens)
+
+    # extend the keywords list
+    for i, pattern in enumerate(tokens['root']):
+        if pattern[1] == Keyword:
+            tokens['root'][i] = (
+                r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
+                Keyword)
+            del i
+            break
+    else:
+        assert 0, "SQL keywords not found"
+
+    # Add specific PL/pgSQL rules (before the SQL ones)
+    tokens['root'][:0] = [
+        (r'\%[a-z][a-z0-9_]*\b', Name.Builtin),  # actually, a datatype
+        (r':=', Operator),
+        (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
+        (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo),  # #variable_conflict
+    ]
+
+
+class PsqlRegexLexer(PostgresBase, RegexLexer):
+    """
+    Extend the PostgresLexer adding support specific for psql commands.
+
+    This is not a complete psql lexer yet as it lacks prompt support
+    and output rendering.
+    """
+
+    name = 'PostgreSQL console - regexp based lexer'
+    aliases = []  # not public
+
+    flags = re.IGNORECASE
+    tokens = deepcopy(PostgresLexer.tokens)
+
+    tokens['root'].append(
+        (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
+    tokens['psql-command'] = [
+        (r'\n', Text, 'root'),
+        (r'\s+', Text),
+        (r'\\[^\s]+', Keyword.Pseudo),
+        (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
+        (r"'(''|[^'])*'", String.Single),
+        (r"`([^`])*`", String.Backtick),
+        (r"[^\s]+", String.Symbol),
+    ]
+
+re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
+re_psql_command = re.compile(r'\s*\\')
+re_end_command = re.compile(r';\s*(--.*?)?$')
+re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
+re_error = re.compile(r'(ERROR|FATAL):')
+re_message = re.compile(
+    r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
+    r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
+
+
+class lookahead(object):
+    """Wrap an iterator and allow pushing back an item."""
+    def __init__(self, x):
+        self.iter = iter(x)
+        self._nextitem = None
+    def __iter__(self):
+        return self
+    def send(self, i):
+        self._nextitem = i
+        return i
+    def next(self):
+        if self._nextitem is not None:
+            ni = self._nextitem
+            self._nextitem = None
+            return ni
+        return self.iter.next()
+
+
+class PostgresConsoleLexer(Lexer):
+    """
+    Lexer for psql sessions.
+
+    *New in Pygments 1.5.*
+    """
+
+    name = 'PostgreSQL console (psql)'
+    aliases = ['psql', 'postgresql-console', 'postgres-console']
+    mimetypes = ['text/x-postgresql-psql']
+
+    def get_tokens_unprocessed(self, data):
+        sql = PsqlRegexLexer(**self.options)
+
+        lines = lookahead(line_re.findall(data))
+
+        # prompt-output cycle
+        while 1:
+
+            # consume the lines of the command: start with an optional prompt
+            # and continue until the end of command is detected
+            curcode = ''
+            insertions = []
+            while 1:
+                try:
+                    line = lines.next()
+                except StopIteration:
+                    # allow the emission of partially collected items
+                    # the repl loop will be broken below
+                    break
+
+                # Identify a shell prompt in case of psql commandline example
+                if line.startswith('$') and not curcode:
+                    lexer = get_lexer_by_name('console', **self.options)
+                    for x in lexer.get_tokens_unprocessed(line):
+                        yield x
+                    break
+
+                # Identify a psql prompt
+                mprompt = re_prompt.match(line)
+                if mprompt is not None:
+                    insertions.append((len(curcode),
+                                       [(0, Generic.Prompt, mprompt.group())]))
+                    curcode += line[len(mprompt.group()):]
+                else:
+                    curcode += line
+
+                # Check if this is the end of the command
+                # TODO: better handle multiline comments at the end with
+                # a lexer with an external state?
+                if re_psql_command.match(curcode) \
+                or re_end_command.search(curcode):
+                    break
+
+            # Emit the combined stream of command and prompt(s)
+            for item in do_insertions(insertions,
+                    sql.get_tokens_unprocessed(curcode)):
+                yield item
+
+            # Emit the output lines
+            out_token = Generic.Output
+            while 1:
+                line = lines.next()
+                mprompt = re_prompt.match(line)
+                if mprompt is not None:
+                    # push the line back to have it processed by the prompt
+                    lines.send(line)
+                    break
+
+                mmsg = re_message.match(line)
+                if mmsg is not None:
+                    if mmsg.group(1).startswith("ERROR") \
+                    or mmsg.group(1).startswith("FATAL"):
+                        out_token = Generic.Error
+                    yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
+                    yield (mmsg.start(2), out_token, mmsg.group(2))
+                else:
+                    yield (0, out_token, line)
+
+
+class SqlLexer(RegexLexer):
+    """
+    Lexer for Structured Query Language. Currently, this lexer does
+    not recognize any special syntax except ANSI SQL.
+    """
+
+    name = 'SQL'
+    aliases = ['sql']
+    filenames = ['*.sql']
+    mimetypes = ['text/x-sql']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'--.*?\n', Comment.Single),
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
+             r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
+             r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
+             r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
+             r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
+             r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
+             r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
+             r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
+             r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
+             r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
+             r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
+             r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
+             r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
+             r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
+             r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
+             r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|'
+             r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
+             r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
+             r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
+             r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
+             r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
+             r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
+             r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
+             r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
+             r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
+             r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
+             r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
+             r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
+             r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
+             r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
+             r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
+             r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
+             r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|'
+             r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
+             r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
+             r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
+             r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
+             r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
+             r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
+             r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
+             r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
+             r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
+             r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
+             r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
+             r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
+             r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|'
+             r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
+             r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|'
+             r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
+             r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
+             r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
+             r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
+             r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
+             r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
+             r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
+             r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
+             r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
+             r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
+             r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
+             r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
+             r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|'
+             r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
+             r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
+             r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
+             r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
+             r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|'
+             r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|'
+             r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
+             r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
+             r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
+             r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
+             r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
+             r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
+             r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
+             r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
+            (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
+             r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
+             r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
+             Name.Builtin),
+            (r'[+*/<>=~!@#%^&|`?^-]', Operator),
+            (r'[0-9]+', Number.Integer),
+            # TODO: Backslash escapes?
+            (r"'(''|[^'])*'", String.Single),
+            (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+            (r'[;:()\[\],\.]', Punctuation)
+        ],
+        'multiline-comments': [
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[^/\*]+', Comment.Multiline),
+            (r'[/*]', Comment.Multiline)
+        ]
+    }
+
+
+class MySqlLexer(RegexLexer):
+    """
+    Special lexer for MySQL.
+    """
+
+    name = 'MySQL'
+    aliases = ['mysql']
+    mimetypes = ['text/x-mysql']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'(#|--\s+).*?\n', Comment.Single),
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'[0-9]+', Number.Integer),
+            (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
+            # TODO: add backslash escapes
+            (r"'(''|[^'])*'", String.Single),
+            (r'"(""|[^"])*"', String.Double),
+            (r"`(``|[^`])*`", String.Symbol),
+            (r'[+*/<>=~!@#%^&|`?^-]', Operator),
+            (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
+             r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
+             r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
+             r'precision|real|numeric|dec|decimal|timestamp|year|char|'
+             r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
+             bygroups(Keyword.Type, Text, Punctuation)),
+            (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
+             r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
+             r'character|check|collate|column|condition|constraint|continue|'
+             r'convert|create|cross|current_date|current_time|'
+             r'current_timestamp|current_user|cursor|database|databases|'
+             r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
+             r'declare|default|delayed|delete|desc|describe|deterministic|'
+             r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
+             r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8'
+             r'|for|force|foreign|from|fulltext|grant|group|having|'
+             r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
+             r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
+             r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
+             r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
+             r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
+             r'minute_microsecond|minute_second|mod|modifies|natural|'
+             r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
+             r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
+             r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
+             r'replace|require|restrict|return|revoke|right|rlike|schema|'
+             r'schemas|second_microsecond|select|sensitive|separator|set|'
+             r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
+             r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
+             r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
+             r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
+             r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
+             r'varying|when|where|while|with|write|x509|xor|year_month|'
+             r'zerofill)\b', Keyword),
+            # TODO: this list is not complete
+            (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
+            (r'(true|false|null)', Name.Constant),
+            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
+             bygroups(Name.Function, Text, Punctuation)),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+            (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
+            (r'[;:()\[\],\.]', Punctuation)
+        ],
+        'multiline-comments': [
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[^/\*]+', Comment.Multiline),
+            (r'[/*]', Comment.Multiline)
+        ]
+    }
+
+
+class SqliteConsoleLexer(Lexer):
+    """
+    Lexer for example sessions using sqlite3.
+
+    *New in Pygments 0.11.*
+    """
+
+    name = 'sqlite3con'
+    aliases = ['sqlite3']
+    filenames = ['*.sqlite3-console']
+    mimetypes = ['text/x-sqlite3-console']
+
+    def get_tokens_unprocessed(self, data):
+        sql = SqlLexer(**self.options)
+
+        curcode = ''
+        insertions = []
+        for match in line_re.finditer(data):
+            line = match.group()
+            if line.startswith('sqlite> ') or line.startswith(' ...> '):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:8])]))
+                curcode += line[8:]
+            else:
+                if curcode:
+                    for item in do_insertions(insertions,
+                                sql.get_tokens_unprocessed(curcode)):
+                        yield item
+                    curcode = ''
+                    insertions = []
+                if line.startswith('SQL error: '):
+                    yield (match.start(), Generic.Traceback, line)
+                else:
+                    yield (match.start(), Generic.Output, line)
+        if curcode:
+            for item in do_insertions(insertions,
+                        sql.get_tokens_unprocessed(curcode)):
+                yield item
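
A notable piece of the new sql.py is language_callback: the body of a $-quoted string in PostgreSQL source is handed to another lexer chosen from a nearby LANGUAGE clause, so a PL/Python function body ends up highlighted as Python. A minimal sketch of how that path is exercised, again not part of the diff:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    sql = '''
    CREATE FUNCTION add_one(i integer) RETURNS integer AS $$
        return i + 1
    $$ LANGUAGE plpythonu;
    '''

    # PostgresBase._get_lexer() reduces 'plpythonu' to 'python', so the body
    # between the $$ markers is re-lexed with the Python lexer.
    print(highlight(sql, get_lexer_by_name('postgresql'), TerminalFormatter()))
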