pygmentize 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. data/LICENSE +19 -0
  2. data/lib/pygments.rb +23 -0
  3. data/pygmentize.gemspec +11 -0
  4. data/test/pygments.rb +19 -0
  5. data/vendor/pygmentize.py +7 -0
  6. data/vendor/pygments/AUTHORS +73 -0
  7. data/vendor/pygments/LICENSE +25 -0
  8. data/vendor/pygments/__init__.py +91 -0
  9. data/vendor/pygments/__init__.pyc +0 -0
  10. data/vendor/pygments/cmdline.py +430 -0
  11. data/vendor/pygments/cmdline.pyc +0 -0
  12. data/vendor/pygments/console.py +74 -0
  13. data/vendor/pygments/console.pyc +0 -0
  14. data/vendor/pygments/filter.py +74 -0
  15. data/vendor/pygments/filter.pyc +0 -0
  16. data/vendor/pygments/filters/__init__.py +357 -0
  17. data/vendor/pygments/filters/__init__.pyc +0 -0
  18. data/vendor/pygments/formatter.py +92 -0
  19. data/vendor/pygments/formatter.pyc +0 -0
  20. data/vendor/pygments/formatters/__init__.py +68 -0
  21. data/vendor/pygments/formatters/__init__.pyc +0 -0
  22. data/vendor/pygments/formatters/_mapping.py +92 -0
  23. data/vendor/pygments/formatters/_mapping.pyc +0 -0
  24. data/vendor/pygments/formatters/bbcode.py +109 -0
  25. data/vendor/pygments/formatters/bbcode.pyc +0 -0
  26. data/vendor/pygments/formatters/html.py +723 -0
  27. data/vendor/pygments/formatters/html.pyc +0 -0
  28. data/vendor/pygments/formatters/img.py +553 -0
  29. data/vendor/pygments/formatters/img.pyc +0 -0
  30. data/vendor/pygments/formatters/latex.py +354 -0
  31. data/vendor/pygments/formatters/latex.pyc +0 -0
  32. data/vendor/pygments/formatters/other.py +117 -0
  33. data/vendor/pygments/formatters/other.pyc +0 -0
  34. data/vendor/pygments/formatters/rtf.py +136 -0
  35. data/vendor/pygments/formatters/rtf.pyc +0 -0
  36. data/vendor/pygments/formatters/svg.py +154 -0
  37. data/vendor/pygments/formatters/svg.pyc +0 -0
  38. data/vendor/pygments/formatters/terminal.py +109 -0
  39. data/vendor/pygments/formatters/terminal.pyc +0 -0
  40. data/vendor/pygments/formatters/terminal256.py +219 -0
  41. data/vendor/pygments/formatters/terminal256.pyc +0 -0
  42. data/vendor/pygments/lexer.py +660 -0
  43. data/vendor/pygments/lexer.pyc +0 -0
  44. data/vendor/pygments/lexers/__init__.py +226 -0
  45. data/vendor/pygments/lexers/__init__.pyc +0 -0
  46. data/vendor/pygments/lexers/_asybuiltins.py +1645 -0
  47. data/vendor/pygments/lexers/_clbuiltins.py +232 -0
  48. data/vendor/pygments/lexers/_luabuiltins.py +256 -0
  49. data/vendor/pygments/lexers/_mapping.py +234 -0
  50. data/vendor/pygments/lexers/_mapping.pyc +0 -0
  51. data/vendor/pygments/lexers/_phpbuiltins.py +3389 -0
  52. data/vendor/pygments/lexers/_vimbuiltins.py +3 -0
  53. data/vendor/pygments/lexers/agile.py +1485 -0
  54. data/vendor/pygments/lexers/agile.pyc +0 -0
  55. data/vendor/pygments/lexers/asm.py +353 -0
  56. data/vendor/pygments/lexers/compiled.py +2365 -0
  57. data/vendor/pygments/lexers/dotnet.py +355 -0
  58. data/vendor/pygments/lexers/functional.py +756 -0
  59. data/vendor/pygments/lexers/functional.pyc +0 -0
  60. data/vendor/pygments/lexers/math.py +461 -0
  61. data/vendor/pygments/lexers/other.py +2297 -0
  62. data/vendor/pygments/lexers/parsers.py +695 -0
  63. data/vendor/pygments/lexers/special.py +100 -0
  64. data/vendor/pygments/lexers/special.pyc +0 -0
  65. data/vendor/pygments/lexers/templates.py +1387 -0
  66. data/vendor/pygments/lexers/text.py +1586 -0
  67. data/vendor/pygments/lexers/web.py +1619 -0
  68. data/vendor/pygments/lexers/web.pyc +0 -0
  69. data/vendor/pygments/plugin.py +74 -0
  70. data/vendor/pygments/plugin.pyc +0 -0
  71. data/vendor/pygments/scanner.py +104 -0
  72. data/vendor/pygments/style.py +117 -0
  73. data/vendor/pygments/style.pyc +0 -0
  74. data/vendor/pygments/styles/__init__.py +68 -0
  75. data/vendor/pygments/styles/__init__.pyc +0 -0
  76. data/vendor/pygments/styles/autumn.py +65 -0
  77. data/vendor/pygments/styles/borland.py +51 -0
  78. data/vendor/pygments/styles/bw.py +49 -0
  79. data/vendor/pygments/styles/colorful.py +81 -0
  80. data/vendor/pygments/styles/default.py +73 -0
  81. data/vendor/pygments/styles/default.pyc +0 -0
  82. data/vendor/pygments/styles/emacs.py +72 -0
  83. data/vendor/pygments/styles/friendly.py +72 -0
  84. data/vendor/pygments/styles/fruity.py +43 -0
  85. data/vendor/pygments/styles/manni.py +75 -0
  86. data/vendor/pygments/styles/monokai.py +106 -0
  87. data/vendor/pygments/styles/murphy.py +80 -0
  88. data/vendor/pygments/styles/native.py +65 -0
  89. data/vendor/pygments/styles/pastie.py +75 -0
  90. data/vendor/pygments/styles/perldoc.py +69 -0
  91. data/vendor/pygments/styles/tango.py +141 -0
  92. data/vendor/pygments/styles/trac.py +63 -0
  93. data/vendor/pygments/styles/vim.py +63 -0
  94. data/vendor/pygments/styles/vs.py +38 -0
  95. data/vendor/pygments/token.py +198 -0
  96. data/vendor/pygments/token.pyc +0 -0
  97. data/vendor/pygments/unistring.py +130 -0
  98. data/vendor/pygments/unistring.pyc +0 -0
  99. data/vendor/pygments/util.py +226 -0
  100. data/vendor/pygments/util.pyc +0 -0
  101. metadata +166 -0
data/vendor/pygments/lexers/math.py
@@ -0,0 +1,461 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.math
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for math languages.
+
+    :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions
+from pygments.token import Comment, String, Punctuation, Keyword, Name, \
+    Operator, Number, Text, Generic
+
+from pygments.lexers.agile import PythonLexer
+
+__all__ = ['MuPADLexer', 'MatlabLexer', 'MatlabSessionLexer', 'NumPyLexer',
+           'RConsoleLexer', 'SLexer']
+
+
+class MuPADLexer(RegexLexer):
+    """
+    A `MuPAD <http://www.mupad.com>`_ lexer.
+    Contributed by Christopher Creutzig <christopher@creutzig.de>.
+
+    *New in Pygments 0.8.*
+    """
+    name = 'MuPAD'
+    aliases = ['mupad']
+    filenames = ['*.mu']
+
+    tokens = {
+      'root' : [
+        (r'//.*?$', Comment.Single),
+        (r'/\*', Comment.Multiline, 'comment'),
+        (r'"(?:[^"\\]|\\.)*"', String),
+        (r'\(|\)|\[|\]|\{|\}', Punctuation),
+        (r'''(?x)\b(?:
+            next|break|end|
+            axiom|end_axiom|category|end_category|domain|end_domain|inherits|
+            if|%if|then|elif|else|end_if|
+            case|of|do|otherwise|end_case|
+            while|end_while|
+            repeat|until|end_repeat|
+            for|from|to|downto|step|end_for|
+            proc|local|option|save|begin|end_proc|
+            delete|frame
+          )\b''', Keyword),
+        (r'''(?x)\b(?:
+            DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
+            DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
+            DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
+            DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
+          )\b''', Name.Class),
+        (r'''(?x)\b(?:
+            PI|EULER|E|CATALAN|
+            NIL|FAIL|undefined|infinity|
+            TRUE|FALSE|UNKNOWN
+          )\b''',
+          Name.Constant),
+        (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
+        (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
+        (r'''(?x)\b(?:
+            and|or|not|xor|
+            assuming|
+            div|mod|
+            union|minus|intersect|in|subset
+          )\b''',
+          Operator.Word),
+        (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
+        #(r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
+        (r'''(?x)
+          ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
+          (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)\s*([(])''',
+          bygroups(Name.Function, Punctuation)),
+        (r'''(?x)
+          (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
+          (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable),
+        (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+        (r'\.[0-9]+(?:e[0-9]+)?', Number),
+        (r'.', Text)
+      ],
+      'comment' : [
+        (r'[^*/]', Comment.Multiline),
+        (r'/\*', Comment.Multiline, '#push'),
+        (r'\*/', Comment.Multiline, '#pop'),
+        (r'[*/]', Comment.Multiline)
+      ]
+    }
+
+
+class MatlabLexer(RegexLexer):
+    """
+    For Matlab (or GNU Octave) source code.
+    Contributed by Ken Schutte <kschutte@csail.mit.edu>.
+
+    *New in Pygments 0.10.*
+    """
+    name = 'Matlab'
+    aliases = ['matlab', 'octave']
+    filenames = ['*.m']
+    mimetypes = ['text/matlab']
+
+    #
+    # These lists are generated automatically.
+    # Run the following in bash shell:
+    #
+    # for f in elfun specfun elmat; do
+    #   echo -n "$f = "
+    #   matlab -nojvm -r "help $f;exit;" | perl -ne \
+    #     'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
+    # done
+    #
+    # elfun: Elementary math functions
+    # specfun: Special Math functions
+    # elmat: Elementary matrices and matrix manipulation
+    #
+    # taken from Matlab version 7.4.0.336 (R2007a)
+    #
+    elfun = ["sin","sind","sinh","asin","asind","asinh","cos","cosd","cosh",
+             "acos","acosd","acosh","tan","tand","tanh","atan","atand","atan2",
+             "atanh","sec","secd","sech","asec","asecd","asech","csc","cscd",
+             "csch","acsc","acscd","acsch","cot","cotd","coth","acot","acotd",
+             "acoth","hypot","exp","expm1","log","log1p","log10","log2","pow2",
+             "realpow","reallog","realsqrt","sqrt","nthroot","nextpow2","abs",
+             "angle","complex","conj","imag","real","unwrap","isreal","cplxpair",
+             "fix","floor","ceil","round","mod","rem","sign"]
+    specfun = ["airy","besselj","bessely","besselh","besseli","besselk","beta",
+               "betainc","betaln","ellipj","ellipke","erf","erfc","erfcx",
+               "erfinv","expint","gamma","gammainc","gammaln","psi","legendre",
+               "cross","dot","factor","isprime","primes","gcd","lcm","rat",
+               "rats","perms","nchoosek","factorial","cart2sph","cart2pol",
+               "pol2cart","sph2cart","hsv2rgb","rgb2hsv"]
+    elmat = ["zeros","ones","eye","repmat","rand","randn","linspace","logspace",
+             "freqspace","meshgrid","accumarray","size","length","ndims","numel",
+             "disp","isempty","isequal","isequalwithequalnans","cat","reshape",
+             "diag","blkdiag","tril","triu","fliplr","flipud","flipdim","rot90",
+             "find","end","sub2ind","ind2sub","bsxfun","ndgrid","permute",
+             "ipermute","shiftdim","circshift","squeeze","isscalar","isvector",
+             "ans","eps","realmax","realmin","pi","i","inf","nan","isnan",
+             "isinf","isfinite","j","why","compan","gallery","hadamard","hankel",
+             "hilb","invhilb","magic","pascal","rosser","toeplitz","vander",
+             "wilkinson"]
+
+    tokens = {
+        'root': [
+            # line starting with '!' is sent as a system command. not sure what
+            # label to use...
+            (r'^!.*', String.Other),
+            (r'%.*$', Comment),
+            (r'^\s*function', Keyword, 'deffunc'),
+
+            # from 'iskeyword' on version 7.4.0.336 (R2007a):
+            (r'(break|case|catch|classdef|continue|else|elseif|end|for|function|'
+             r'global|if|otherwise|parfor|persistent|return|switch|try|while)\b',
+             Keyword),
+
+            ("(" + "|".join(elfun+specfun+elmat) + r')\b', Name.Builtin),
+
+            # operators:
+            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+            # operators requiring escape for re:
+            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+            # punctuation:
+            (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
+            (r'=|:|;', Punctuation),
+
+            # quote can be transpose, instead of string:
+            # (not great, but handles common cases...)
+            (r'(?<=[\w\)\]])\'', Operator),
+
+            (r'(?<![\w\)\]])\'', String, 'string'),
+            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+            (r'.', Text),
+        ],
+        'string': [
+            (r'[^\']*\'', String, '#pop')
+        ],
+        'deffunc': [
+            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+             bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
+                      Text.Whitespace, Name.Function, Punctuation, Text,
+                      Punctuation, Text.Whitespace), '#pop'),
+        ],
+    }
+
+    def analyse_text(text):
+        if re.match('^\s*%', text, re.M): # comment
+            return 0.9
+        elif re.match('^!\w+', text, re.M): # system cmd
+            return 0.9
+        return 0.1
+
+line_re = re.compile('.*?\n')
+
+class MatlabSessionLexer(Lexer):
+    """
+    For Matlab (or GNU Octave) sessions. Modeled after PythonConsoleLexer.
+    Contributed by Ken Schutte <kschutte@csail.mit.edu>.
+
+    *New in Pygments 0.10.*
+    """
+    name = 'Matlab session'
+    aliases = ['matlabsession']
+
+    def get_tokens_unprocessed(self, text):
+        mlexer = MatlabLexer(**self.options)
+
+        curcode = ''
+        insertions = []
+
+        for match in line_re.finditer(text):
+            line = match.group()
+
+            if line.startswith('>>'):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:3])]))
+                curcode += line[3:]
+
+            elif line.startswith('???'):
+
+                idx = len(curcode)
+
+                # without is showing error on same line as before...?
+                line = "\n" + line
+                token = (0, Generic.Traceback, line)
+                insertions.append( (idx, [token,]) )
+
+            else:
+                if curcode:
+                    for item in do_insertions(
+                            insertions, mlexer.get_tokens_unprocessed(curcode)):
+                        yield item
+                    curcode = ''
+                    insertions = []
+
+                yield match.start(), Generic.Output, line
+
+        if curcode: # or item:
+            for item in do_insertions(
+                    insertions, mlexer.get_tokens_unprocessed(curcode)):
+                yield item
+
+
+class NumPyLexer(PythonLexer):
+    '''
+    A Python lexer recognizing Numerical Python builtins.
+
+    *New in Pygments 0.10.*
+    '''
+
+    name = 'NumPy'
+    aliases = ['numpy']
+
+    # override the mimetypes to not inherit them from python
+    mimetypes = []
+    filenames = []
+
+    EXTRA_KEYWORDS = set([
+        'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
+        'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
+        'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
+        'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
+        'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
+        'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
+        'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
+        'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
+        'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
+        'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
+        'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
+        'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
+        'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
+        'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
+        'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
+        'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
+        'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
+        'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
+        'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
+        'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
+        'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
+        'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
+        'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
+        'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
+        'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
+        'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
+        'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
+        'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
+        'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
+        'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
+        'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
+        'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
+        'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
+        'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
+        'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
+        'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
+        'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
+        'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
+        'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
+        'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
+        'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
+        'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
+        'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
+        'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
+        'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
+        'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
+        'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
+        'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
+        'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
+        'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
+        'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
+        'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
+        'set_numeric_ops', 'set_printoptions', 'set_string_function',
+        'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
+        'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
+        'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
+        'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
+        'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
+        'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
+        'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
+        'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
+        'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
+        'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
+        'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
+    ])
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in \
+                PythonLexer.get_tokens_unprocessed(self, text):
+            if token is Name and value in self.EXTRA_KEYWORDS:
+                yield index, Keyword.Pseudo, value
+            else:
+                yield index, token, value
+
+
+class RConsoleLexer(Lexer):
+    """
+    For R console transcripts or R CMD BATCH output files.
+    """
+
+    name = 'RConsole'
+    aliases = ['rconsole', 'rout']
+    filenames = ['*.Rout']
+
+    def get_tokens_unprocessed(self, text):
+        slexer = SLexer(**self.options)
+
+        current_code_block = ''
+        insertions = []
+
+        for match in line_re.finditer(text):
+            line = match.group()
+            if line.startswith('>') or line.startswith('+'):
+                # Colorize the prompt as such,
+                # then put rest of line into current_code_block
+                insertions.append((len(current_code_block),
+                                   [(0, Generic.Prompt, line[:2])]))
+                current_code_block += line[2:]
+            else:
+                # We have reached a non-prompt line!
+                # If we have stored prompt lines, need to process them first.
+                if current_code_block:
+                    # Weave together the prompts and highlight code.
+                    for item in do_insertions(insertions,
+                            slexer.get_tokens_unprocessed(current_code_block)):
+                        yield item
+                    # Reset vars for next code block.
+                    current_code_block = ''
+                    insertions = []
+                # Now process the actual line itself, this is output from R.
+                yield match.start(), Generic.Output, line
+
+        # If we happen to end on a code block with nothing after it, need to
+        # process the last code block. This is neither elegant nor DRY so
+        # should be changed.
+        if current_code_block:
+            for item in do_insertions(insertions,
+                    slexer.get_tokens_unprocessed(current_code_block)):
+                yield item
+
+
+class SLexer(RegexLexer):
+    """
+    For S, S-plus, and R source code.
+
+    *New in Pygments 0.10.*
+    """
+
+    name = 'S'
+    aliases = ['splus', 's', 'r']
+    filenames = ['*.S', '*.R']
+    mimetypes = ['text/S-plus', 'text/S', 'text/R']
+
+    tokens = {
+        'comments': [
+            (r'#.*$', Comment.Single),
+        ],
+        'valid_name': [
+            (r'[a-zA-Z][0-9a-zA-Z\._]+', Text),
+            (r'`.+`', String.Backtick),
+        ],
+        'punctuation': [
+            (r'\[|\]|\[\[|\]\]|\$|\(|\)|@|:::?|;|,', Punctuation),
+        ],
+        'keywords': [
+            (r'for(?=\s*\()|while(?=\s*\()|if(?=\s*\()|(?<=\s)else|'
+             r'(?<=\s)break(?=;|$)|return(?=\s*\()|function(?=\s*\()',
+             Keyword.Reserved)
+        ],
+        'operators': [
+            (r'<-|-|==|<=|>=|<|>|&&|&|!=|\|\|?', Operator),
+            (r'\*|\+|\^|/|%%|%/%|=', Operator),
+            (r'%in%|%*%', Operator)
+        ],
+        'builtin_symbols': [
+            (r'(NULL|NA|TRUE|FALSE|NaN)\b', Keyword.Constant),
+            (r'(T|F)\b', Keyword.Variable),
+        ],
+        'numbers': [
+            (r'(?<![0-9a-zA-Z\)\}\]`\"])(?=\s*)[-\+]?[0-9]+'
+             r'(\.[0-9]*)?(E[0-9][-\+]?(\.[0-9]*)?)?', Number),
+            (r'\.[0-9]*(E[0-9][-\+]?(\.[0-9]*)?)?', Number),
+        ],
+        'statements': [
+            include('comments'),
+            # whitespaces
+            (r'\s+', Text),
+            (r'\'', String, 'string_squote'),
+            (r'\"', String, 'string_dquote'),
+            include('builtin_symbols'),
+            include('numbers'),
+            include('keywords'),
+            include('punctuation'),
+            include('operators'),
+            include('valid_name'),
+        ],
+        'root': [
+            include('statements'),
+            # blocks:
+            (r'\{|\}', Punctuation),
+            #(r'\{', Punctuation, 'block'),
+            (r'.', Text),
+        ],
+        #'block': [
+        #    include('statements'),
+        #    ('\{', Punctuation, '#push'),
+        #    ('\}', Punctuation, '#pop')
+        #],
+        'string_squote': [
+            (r'[^\']*\'', String, '#pop'),
+        ],
+        'string_dquote': [
+            (r'[^\"]*\"', String, '#pop'),
+        ],
+    }
+
+    def analyse_text(text):
+        return '<-' in text
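
The lexers added in this file follow the standard Pygments lexer/formatter API provided by the rest of the vendored package (pygments/__init__.py, pygments/formatters/), so they can be exercised directly from Python once the vendored tree is importable. A minimal sketch, assuming data/vendor is on the Python path; the sample Matlab snippet and the TerminalFormatter choice are illustrative only, not part of this release:

# Minimal sketch: drive the MatlabLexer from this diff through the standard
# pygments.highlight() entry point. Assumes data/vendor/ is on sys.path so
# the vendored "pygments" package is importable; the sample source string
# and the formatter choice are illustrative.
from pygments import highlight
from pygments.lexers.math import MatlabLexer
from pygments.formatters import TerminalFormatter

sample = "function y = square(x)\n  y = x .^ 2;  % element-wise square\nend\n"
print(highlight(sample, MatlabLexer(), TerminalFormatter()))

The same call pattern applies to the other lexers defined in this file (SLexer, RConsoleLexer, MatlabSessionLexer, NumPyLexer); only the lexer class and input text change.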