pygments.rb 0.3.2 → 0.3.3
- data/README.md +6 -1
- data/lexers +0 -0
- data/lib/pygments/version.rb +1 -1
- data/vendor/pygments-main/AUTHORS +15 -0
- data/vendor/pygments-main/CHANGES +28 -1
- data/vendor/pygments-main/LICENSE +1 -1
- data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +121 -0
- data/vendor/pygments-main/pygments/cmdline.py +1 -1
- data/vendor/pygments-main/pygments/filters/__init__.py +0 -1
- data/vendor/pygments-main/pygments/formatters/_mapping.py +2 -2
- data/vendor/pygments-main/pygments/formatters/img.py +1 -1
- data/vendor/pygments-main/pygments/formatters/latex.py +8 -8
- data/vendor/pygments-main/pygments/formatters/other.py +0 -2
- data/vendor/pygments-main/pygments/lexers/_lassobuiltins.py +5413 -0
- data/vendor/pygments-main/pygments/lexers/_mapping.py +36 -11
- data/vendor/pygments-main/pygments/lexers/_openedgebuiltins.py +551 -0
- data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +0 -1
- data/vendor/pygments-main/pygments/lexers/_robotframeworklexer.py +546 -0
- data/vendor/pygments-main/pygments/lexers/_sourcemodbuiltins.py +1072 -0
- data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +174 -0
- data/vendor/pygments-main/pygments/lexers/_vimbuiltins.py +13 -3
- data/vendor/pygments-main/pygments/lexers/agile.py +145 -33
- data/vendor/pygments-main/pygments/lexers/asm.py +2 -2
- data/vendor/pygments-main/pygments/lexers/compiled.py +328 -36
- data/vendor/pygments-main/pygments/lexers/dalvik.py +104 -0
- data/vendor/pygments-main/pygments/lexers/dotnet.py +8 -14
- data/vendor/pygments-main/pygments/lexers/functional.py +773 -8
- data/vendor/pygments-main/pygments/lexers/jvm.py +184 -36
- data/vendor/pygments-main/pygments/lexers/math.py +349 -23
- data/vendor/pygments-main/pygments/lexers/other.py +315 -492
- data/vendor/pygments-main/pygments/lexers/parsers.py +83 -1
- data/vendor/pygments-main/pygments/lexers/shell.py +4 -1
- data/vendor/pygments-main/pygments/lexers/templates.py +112 -2
- data/vendor/pygments-main/pygments/lexers/text.py +52 -3
- data/vendor/pygments-main/pygments/lexers/web.py +382 -36
- data/vendor/pygments-main/pygments/unistring.py +35 -25
- data/vendor/pygments-main/pygments/util.py +45 -0
- data/vendor/pygments-main/tests/examplefiles/Config.in.cache +1973 -0
- data/vendor/pygments-main/tests/examplefiles/example.Rd +78 -0
- data/vendor/pygments-main/tests/examplefiles/example.bug +54 -0
- data/vendor/pygments-main/tests/examplefiles/example.ceylon +33 -0
- data/vendor/pygments-main/tests/examplefiles/example.jag +48 -0
- data/vendor/pygments-main/tests/examplefiles/example.monkey +152 -0
- data/vendor/pygments-main/tests/examplefiles/example.msc +43 -0
- data/vendor/pygments-main/tests/examplefiles/example.reg +19 -0
- data/vendor/pygments-main/tests/examplefiles/example.rkt +95 -0
- data/vendor/pygments-main/tests/examplefiles/example.rpf +4 -0
- data/vendor/pygments-main/tests/examplefiles/example.stan +97 -0
- data/vendor/pygments-main/tests/examplefiles/example.xtend +34 -0
- data/vendor/pygments-main/tests/examplefiles/example2.msc +79 -0
- data/vendor/pygments-main/tests/examplefiles/garcia-wachs.kk +123 -0
- data/vendor/pygments-main/tests/examplefiles/hello.smali +40 -0
- data/vendor/pygments-main/tests/examplefiles/hello.sp +9 -0
- data/vendor/pygments-main/tests/examplefiles/http_request_example +2 -1
- data/vendor/pygments-main/tests/examplefiles/http_response_example +4 -2
- data/vendor/pygments-main/tests/examplefiles/inet_pton6.dg +71 -0
- data/vendor/pygments-main/tests/examplefiles/json.lasso +301 -0
- data/vendor/pygments-main/tests/examplefiles/json.lasso9 +213 -0
- data/vendor/pygments-main/tests/examplefiles/livescript-demo.ls +41 -0
- data/vendor/pygments-main/tests/examplefiles/matlab_sample +5 -2
- data/vendor/pygments-main/tests/examplefiles/metagrammar.treetop +455 -0
- data/vendor/pygments-main/tests/examplefiles/pytb_test3.pytb +4 -0
- data/vendor/pygments-main/tests/examplefiles/robotframework.txt +39 -0
- data/vendor/pygments-main/tests/examplefiles/rust_example.rs +743 -0
- data/vendor/pygments-main/tests/examplefiles/test.R +149 -115
- data/vendor/pygments-main/tests/examplefiles/test.cu +36 -0
- data/vendor/pygments-main/tests/test_basic_api.py +1 -1
- data/vendor/pygments-main/tests/test_util.py +18 -0
- metadata +34 -3
- data/vendor/pygments-main/REVISION +0 -1
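The bulk of this release is the refreshed vendored Pygments tree, which brings in a number of lexers documented as "New in Pygments 1.6" (AspectJ, Ceylon, Xtend, BUGS/JAGS/Stan, Rd, and others). As an illustrative sketch only — the sample snippets and the choice of `HtmlFormatter` below are my own, not part of this diff — the new aliases shown in the hunks that follow can be exercised directly against the vendored `pygments-main` checkout:

```python
# Illustrative sketch: exercise a few lexers added by the bundled Pygments
# update. The aliases ('ceylon', 'stan', 'xtend') come from the diff below;
# the sample code strings are made up for demonstration.
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter

samples = {
    'ceylon': 'shared void run() { print("hello"); }',
    'stan':   'parameters { real mu; } model { mu ~ normal(0, 1); }',
    'xtend':  'class Greeter { def greet() { println("hello") } }',
}

for alias, code in samples.items():
    lexer = get_lexer_by_name(alias)   # resolved via pygments/lexers/_mapping.py
    print(alias, '->', lexer.name)
    print(highlight(code, lexer, HtmlFormatter(nowrap=True)))
```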
--- a/data/vendor/pygments-main/pygments/lexers/jvm.py
+++ b/data/vendor/pygments-main/pygments/lexers/jvm.py
@@ -20,7 +20,8 @@ from pygments import unistring as uni
 
 
 __all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
-           'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'KotlinLexer']
+           'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'KotlinLexer',
+           'XtendLexer', 'AspectJLexer', 'CeylonLexer']
 
 
 class JavaLexer(RegexLexer):
@@ -35,15 +36,12 @@ class JavaLexer(RegexLexer):
 
     flags = re.MULTILINE | re.DOTALL
 
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
     tokens = {
         'root': [
             # method names
-            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'
-             r'(\s*)(\()',
+            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]<>]*\s+)+?)' # return arguments
+             r'([a-zA-Z_][a-zA-Z0-9_]*)'                       # method name
+             r'(\s*)(\()',                                     # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
             (r'[^\S\n]+', Text),
             (r'//.*?\n', Comment.Single),
@@ -62,13 +60,13 @@ class JavaLexer(RegexLexer):
             (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
             (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
             (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
             (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
             (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
             (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
             (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
             (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-f]+', Number.Hex),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'[0-9]+L?', Number.Integer),
             (r'\n', Text)
         ],
@@ -81,6 +79,45 @@ class JavaLexer(RegexLexer):
     }
 
 
+class AspectJLexer(JavaLexer):
+    """
+    For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'AspectJ'
+    aliases = ['aspectj']
+    filenames = ['*.aj']
+    mimetypes = ['text/x-aspectj']
+
+    aj_keywords = [
+        'aspect', 'pointcut', 'privileged', 'call', 'execution',
+        'initialization', 'preinitialization', 'handler', 'get', 'set',
+        'staticinitialization', 'target', 'args', 'within', 'withincode',
+        'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
+        'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
+        'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
+        'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
+        'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
+        'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
+    ]
+    aj_inter_type = ['parents:', 'warning:', 'error:', 'soft:', 'precedence:']
+    aj_inter_type_annotation = ['@type', '@method', '@constructor', '@field']
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
+            if token is Name and value in self.aj_keywords:
+                yield index, Keyword, value
+            elif token is Name.Label and value in self.aj_inter_type:
+                yield index, Keyword, value[:-1]
+                yield index, Operator, value[-1]
+            elif token is Name.Decorator and value in self.aj_inter_type_annotation:
+                yield index, Keyword, value
+            else:
+                yield index, token, value
+
+
 class ScalaLexer(RegexLexer):
     """
     For `Scala <http://www.scala-lang.org>`_ source code.
@@ -93,9 +130,6 @@ class ScalaLexer(RegexLexer):
 
     flags = re.MULTILINE | re.DOTALL
 
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
     # don't use raw unicode strings!
     op = u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+'
 
@@ -125,9 +159,9 @@ class ScalaLexer(RegexLexer):
             (r'(true|false|null)\b', Keyword.Constant),
             (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
             (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
-            (r'""".*?"""', String),
+            (r'""".*?"""(?!")', String),
             (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
             # (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
             #  Name.Attribute)),
             (idrest, Name),
@@ -137,7 +171,7 @@ class ScalaLexer(RegexLexer):
             (op, Operator),
             (r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
              Number.Float),
-            (r'0x[0-9a-f]+', Number.Hex),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'[0-9]+L?', Number.Integer),
             (r'\n', Text)
         ],
@@ -197,9 +231,6 @@ class GosuLexer(RegexLexer):
 
     flags = re.MULTILINE | re.DOTALL
 
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
     tokens = {
         'root': [
             # method names
@@ -298,9 +329,6 @@ class GroovyLexer(RegexLexer):
 
     flags = re.MULTILINE | re.DOTALL
 
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
     tokens = {
         'root': [
             # method names
@@ -329,13 +357,13 @@ class GroovyLexer(RegexLexer):
             (r"'(\\\\|\\'|[^'])*'", String.Single),
             (r'\$/((?!/\$).)*/\$', String),
             (r'/(\\\\|\\"|[^/])*/', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
             (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
             (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
             (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
             (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
             (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-f]+', Number.Hex),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'[0-9]+L?', Number.Integer),
             (r'\n', Text)
         ],
@@ -644,7 +672,7 @@ class ClojureLexer(RegexLexer):
             (r"\\(.|[a-z]+)", String.Char),
 
             # keywords
-            (r'
+            (r'::?' + valid_name, String.Symbol),
 
             # special operators
             (r'~@|[`\'#^~&]', Operator),
@@ -690,9 +718,6 @@ class TeaLangLexer(RegexLexer):
 
     flags = re.MULTILINE | re.DOTALL
 
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
     tokens = {
         'root': [
             # method names
@@ -717,7 +742,7 @@ class TeaLangLexer(RegexLexer):
             (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
             (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
             (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-f]+', Number.Hex),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'[0-9]+L?', Number.Integer),
             (r'\n', Text)
         ],
@@ -729,6 +754,69 @@ class TeaLangLexer(RegexLexer):
         ],
     }
 
+class CeylonLexer(RegexLexer):
+    """
+    For `Ceylon <http://ceylon-lang.org/>`_ source code.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'Ceylon'
+    aliases = ['ceylon']
+    filenames = ['*.ceylon']
+    mimetypes = ['text/x-ceylon']
+
+    flags = re.MULTILINE | re.DOTALL
+
+    #: optional Comment or Whitespace
+    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+    tokens = {
+        'root': [
+            # method names
+            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
+             r'([a-zA-Z_][a-zA-Z0-9_]*)'                    # method name
+             r'(\s*)(\()',                                  # signature start
+             bygroups(using(this), Name.Function, Text, Operator)),
+            (r'[^\S\n]+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'(variable|shared|abstract|doc|by|formal|actual)', Name.Decorator),
+            (r'(break|case|catch|continue|default|else|finally|for|in|variable|'
+             r'if|return|switch|this|throw|try|while|is|exists|nonempty|then|outer)\b',
+             Keyword),
+            (r'(abstracts|extends|satisfies|adapts|'
+             r'super|given|of|out|assign|'
+             r'transient|volatile)\b', Keyword.Declaration),
+            (r'(function|value|void)\b',
+             Keyword.Type),
+            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+            (r'(true|false|null)\b', Keyword.Constant),
+            (r'(class|interface|object)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
+            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+            (r'"(\\\\|\\"|[^"])*"', String),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Quoted),
+            (r"`\\.`|`[^\\]`|`\\u[0-9a-fA-F]{4}`", String.Char),
+            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
+            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+            (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
+            (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?', Number.Float),
+            (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
+            (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?', Number.Float),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
+            (r'[0-9]+[kMGTP]?', Number.Integer),
+            (r'\n', Text)
+        ],
+        'class': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+        ],
+        'import': [
+            (r'[a-zA-Z0-9_.]+\w+ \{([a-zA-Z,]+|\.\.\.)\}', Name.Namespace, '#pop')
+        ],
+    }
 
 class KotlinLexer(RegexLexer):
     """
@@ -764,20 +852,15 @@ class KotlinLexer(RegexLexer):
     # for the range of allowed unicode characters in identifiers,
     # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
 
-    def _escape(st):
-        return st.replace(u'\\', ur'\\').replace(u'-', ur'\-').\
-               replace(u'[', ur'\[').replace(u']', ur'\]')
-
     levels = {
         'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
         'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
                   '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
                   uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
         'full': ('@?(?:_|[^' +
-
-                 + '[^' +
-
-                 'Mc')) + ']*'),
+                 uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+                 + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+                                        'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
     }
 
     tokens = {}
@@ -845,3 +928,68 @@ class KotlinLexer(RegexLexer):
             self._tokens = self._all_tokens[level]
 
         RegexLexer.__init__(self, **options)
+
+
+class XtendLexer(RegexLexer):
+    """
+    For `Xtend <http://xtend-lang.org/>`_ source code.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'Xtend'
+    aliases = ['xtend']
+    filenames = ['*.xtend']
+    mimetypes = ['text/x-xtend']
+
+    flags = re.MULTILINE | re.DOTALL
+
+    tokens = {
+        'root': [
+            # method names
+            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
+             r'([a-zA-Z_$][a-zA-Z0-9_$]*)'                  # method name
+             r'(\s*)(\()',                                  # signature start
+             bygroups(using(this), Name.Function, Text, Operator)),
+            (r'[^\S\n]+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+             r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
+             r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
+             Keyword),
+            (r'(def|abstract|const|enum|extends|final|implements|native|private|'
+             r'protected|public|static|strictfp|super|synchronized|throws|'
+             r'transient|volatile)\b', Keyword.Declaration),
+            (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+             Keyword.Type),
+            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+            (r'(true|false|null)\b', Keyword.Constant),
+            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+             'class'),
+            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+            (r"(''')", String, 'template'),
+            (ur"(\u00BB)", String, 'template'),
+            (r'"(\\\\|\\"|[^"])*"', String),
+            (r"'(\\\\|\\'|[^'])*'", String),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
+            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'[0-9]+L?', Number.Integer),
+            (r'\n', Text)
+        ],
+        'class': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+        ],
+        'import': [
+            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+        ],
+        'template': [
+            (r"'''", String, '#pop'),
+            (ur"\u00AB", String, '#pop'),
+            (r'.', String)
+        ],
+    }
--- a/data/vendor/pygments-main/pygments/lexers/math.py
+++ b/data/vendor/pygments-main/pygments/lexers/math.py
@@ -18,13 +18,20 @@ from pygments.token import Comment, String, Punctuation, Keyword, Name, \
 
 from pygments.lexers.agile import PythonLexer
 from pygments.lexers import _scilab_builtins
+from pygments.lexers import _stan_builtins
 
 __all__ = ['JuliaLexer', 'JuliaConsoleLexer', 'MuPADLexer', 'MatlabLexer',
            'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer', 'NumPyLexer',
-           'RConsoleLexer', 'SLexer']
+           'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer',
+           'RdLexer']
 
 
 class JuliaLexer(RegexLexer):
+    """
+    For `Julia <http://julialang.org/>`_ source code.
+
+    *New in Pygments 1.6.*
+    """
     name = 'Julia'
     aliases = ['julia','jl']
     filenames = ['*.jl']
@@ -79,7 +86,7 @@ class JuliaLexer(RegexLexer):
            (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
 
            # try to match trailing transpose
-           (r'(?<=[.\w\)\]])\'', Operator),
+           (r'(?<=[.\w\)\]])\'+', Operator),
 
            # strings
            (r'(?:[IL])"', String, 'string'),
@@ -90,10 +97,11 @@ class JuliaLexer(RegexLexer):
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
 
            # numbers
-           (r'(\d+\.\d*|\d*\.\d+)([
-           (r'\d+[
-           (r'
-           (r'0
+           (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+           (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+           (r'0b[01]+', Number.Binary),
+           (r'0o[0-7]+', Number.Oct),
+           (r'0x[a-fA-F0-9]+', Number.Hex),
            (r'\d+', Number.Integer)
         ],
 
@@ -134,6 +142,8 @@ line_re = re.compile('.*?\n')
 class JuliaConsoleLexer(Lexer):
     """
     For Julia console sessions. Modeled after MatlabSessionLexer.
+
+    *New in Pygments 1.6.*
     """
     name = 'Julia console'
     aliases = ['jlcon']
@@ -251,7 +261,6 @@ class MuPADLexer(RegexLexer):
 class MatlabLexer(RegexLexer):
     """
     For Matlab source code.
-    Contributed by Ken Schutte <kschutte@csail.mit.edu>.
 
     *New in Pygments 0.10.*
     """
@@ -306,6 +315,7 @@ class MatlabLexer(RegexLexer):
             # line starting with '!' is sent as a system command. not sure what
             # label to use...
             (r'^!.*', String.Other),
+            (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
             (r'%.*$', Comment),
             (r'^\s*function', Keyword, 'deffunc'),
 
@@ -316,6 +326,9 @@ class MatlabLexer(RegexLexer):
 
             ("(" + "|".join(elfun+specfun+elmat) + r')\b', Name.Builtin),
 
+            # line continuation with following comment:
+            (r'\.\.\..*$', Comment),
+
             # operators:
             (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
             # operators requiring escape for re:
@@ -336,6 +349,11 @@ class MatlabLexer(RegexLexer):
         'string': [
             (r'[^\']*\'', String, '#pop')
         ],
+        'blockcomment': [
+            (r'^\s*%\}', Comment.Multiline, '#pop'),
+            (r'^.*\n', Comment.Multiline),
+            (r'.', Comment.Multiline),
+        ],
         'deffunc': [
             (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
              bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
@@ -1002,43 +1020,50 @@ class SLexer(RegexLexer):
 
     name = 'S'
     aliases = ['splus', 's', 'r']
-    filenames = ['*.S', '*.R']
-    mimetypes = ['text/S-plus', 'text/S', 'text/
+    filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile']
+    mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
+                 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
 
     tokens = {
         'comments': [
            (r'#.*$', Comment.Single),
         ],
         'valid_name': [
-           (r'[a-zA-Z][0-9a-zA-Z\._]
-
+           (r'[a-zA-Z][0-9a-zA-Z\._]*', Text),
+           # can begin with ., but not if that is followed by a digit
+           (r'\.[a-zA-Z_][0-9a-zA-Z\._]*', Text),
         ],
         'punctuation': [
-           (r'\[|\]|\
+           (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
         ],
         'keywords': [
-           (r'
-            r'(
+           (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
+            r'(?![0-9a-zA-Z\._])',
            Keyword.Reserved)
         ],
         'operators': [
-           (r'
-           (r'
-           (r'%in%|%*%', Operator)
+           (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
+           (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
        ],
        'builtin_symbols': [
-           (r'(NULL|NA|
+           (r'(NULL|NA(_(integer|real|complex|character)_)?|'
+            r'Inf|TRUE|FALSE|NaN|\.\.(\.|[0-9]+))'
+            r'(?![0-9a-zA-Z\._])',
+            Keyword.Constant),
            (r'(T|F)\b', Keyword.Variable),
        ],
        'numbers': [
-
-
-
+           # hex number
+           (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
+           # decimal number
+           (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+)([eE][+-]?[0-9]+)?[Li]?',
+            Number),
        ],
        'statements': [
            include('comments'),
            # whitespaces
            (r'\s+', Text),
+           (r'`.*?`', String.Backtick),
            (r'\'', String, 'string_squote'),
            (r'\"', String, 'string_dquote'),
            include('builtin_symbols'),
@@ -1061,12 +1086,313 @@ class SLexer(RegexLexer):
        #    ('\}', Punctuation, '#pop')
        #],
        'string_squote': [
-           (r'[^\']*\'', String, '#pop'),
+           (r'([^\'\\]|\\.)*\'', String, '#pop'),
        ],
        'string_dquote': [
-           (r'[
+           (r'([^"\\]|\\.)*"', String, '#pop'),
        ],
     }
 
     def analyse_text(text):
         return '<-' in text
+
+
+class BugsLexer(RegexLexer):
+    """
+    Pygments Lexer for `OpenBugs <http://www.openbugs.info/w/>`_ and WinBugs
+    models.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'BUGS'
+    aliases = ['bugs', 'winbugs', 'openbugs']
+    filenames = ['*.bug']
+
+    _FUNCTIONS = [
+        # Scalar functions
+        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+        'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
+        'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
+        'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
+        'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
+        'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
+        'trunc',
+        # Vector functions
+        'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
+        'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
+        'sd', 'sort', 'sum',
+        ## Special
+        'D', 'I', 'F', 'T', 'C']
+    """ OpenBUGS built-in functions
+
+    From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
+
+    This also includes
+
+    - T, C, I : Truncation and censoring.
+      ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
+    - D : ODE
+    - F : Functional http://www.openbugs.info/Examples/Functionals.html
+
+    """
+
+    _DISTRIBUTIONS = ['dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
+                      'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
+                      'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
+                      'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
+                      'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
+                      'dmt', 'dwish']
+    """ OpenBUGS built-in distributions
+
+    Functions from
+    http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
+    """
+
+
+    tokens = {
+        'whitespace' : [
+            (r"\s+", Text),
+        ],
+        'comments' : [
+            # Comments
+            (r'#.*$', Comment.Single),
+        ],
+        'root': [
+            # Comments
+            include('comments'),
+            include('whitespace'),
+            # Block start
+            (r'(model)(\s+)({)',
+             bygroups(Keyword.Namespace, Text, Punctuation)),
+            # Reserved Words
+            (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
+            # Built-in Functions
+            (r'(%s)(?=\s*\()'
+             % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
+             Name.Builtin),
+            # Regular variable names
+            (r'[A-Za-z][A-Za-z0-9_.]*', Name),
+            # Number Literals
+            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+            # Punctuation
+            (r'\[|\]|\(|\)|:|,|;', Punctuation),
+            # Assignment operators
+            # SLexer makes these tokens Operators.
+            (r'<-|~', Operator),
+            # Infix and prefix operators
+            (r'\+|-|\*|/', Operator),
+            # Block
+            (r'[{}]', Punctuation),
+        ]
+    }
+
+    def analyse_text(text):
+        if re.search(r"^\s*model\s*{", text, re.M):
+            return 0.7
+        else:
+            return 0.0
+
+class JagsLexer(RegexLexer):
+    """
+    Pygments Lexer for JAGS.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'JAGS'
+    aliases = ['jags']
+    filenames = ['*.jag', '*.bug']
+
+    ## JAGS
+    _FUNCTIONS = [
+        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+        'cos', 'cosh', 'cloglog',
+        'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
+        'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
+        'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
+        'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
+        'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
+        # Truncation/Censoring (should I include)
+        'T', 'I']
+    # Distributions with density, probability and quartile functions
+    _DISTRIBUTIONS = ['[dpq]%s' % x for x in
+                      ['bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
+                       'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
+                       'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib']]
+    # Other distributions without density and probability
+    _OTHER_DISTRIBUTIONS = [
+        'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
+        'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
+        'dnbinom', 'dweibull', 'ddirich']
+
+    tokens = {
+        'whitespace' : [
+            (r"\s+", Text),
+        ],
+        'names' : [
+            # Regular variable names
+            (r'[a-zA-Z][a-zA-Z0-9_.]*\b', Name),
+        ],
+        'comments' : [
+            # do not use stateful comments
+            (r'(?s)/\*.*?\*/', Comment.Multiline),
+            # Comments
+            (r'#.*$', Comment.Single),
+        ],
+        'root': [
+            # Comments
+            include('comments'),
+            include('whitespace'),
+            # Block start
+            (r'(model|data)(\s+)({)',
+             bygroups(Keyword.Namespace, Text, Punctuation)),
+            (r'var(?![0-9a-zA-Z\._])', Keyword.Declaration),
+            # Reserved Words
+            (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
+            # Builtins
+            # Need to use lookahead because . is a valid char
+            (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
+                                          + _DISTRIBUTIONS
+                                          + _OTHER_DISTRIBUTIONS),
+             Name.Builtin),
+            # Names
+            include('names'),
+            # Number Literals
+            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+            (r'\[|\]|\(|\)|:|,|;', Punctuation),
+            # Assignment operators
+            (r'<-|~', Operator),
+            # # JAGS includes many more than OpenBUGS
+            (r'\+|-|\*|\/|\|\|[&]{2}|[<>=]=?|\^|%.*?%', Operator),
+            (r'[{}]', Punctuation),
+        ]
+    }
+
+    def analyse_text(text):
+        if re.search(r'^\s*model\s*\{', text, re.M):
+            if re.search(r'^\s*data\s*\{', text, re.M):
+                return 0.9
+            elif re.search(r'^\s*var', text, re.M):
+                return 0.9
+            else:
+                return 0.3
+        else:
+            return 0
+
+class StanLexer(RegexLexer):
+    """
+    Pygments Lexer for Stan models.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'Stan'
+    aliases = ['stan']
+    filenames = ['*.stan']
+
+    _RESERVED = ('for', 'in', 'while', 'repeat', 'until', 'if',
+                 'then', 'else', 'true', 'false', 'T',
+                 'lower', 'upper', 'print')
+
+    _TYPES = ('int', 'real', 'vector', 'simplex', 'ordered', 'row_vector',
+              'matrix', 'corr_matrix', 'cov_matrix', 'positive_ordered')
+
+    tokens = {
+        'whitespace' : [
+            (r"\s+", Text),
+        ],
+        'comments' : [
+            (r'(?s)/\*.*?\*/', Comment.Multiline),
+            # Comments
+            (r'(//|#).*$', Comment.Single),
+        ],
+        'root': [
+            # Stan is more restrictive on strings than this regex
+            (r'"[^"]*"', String),
+            # Comments
+            include('comments'),
+            # block start
+            include('whitespace'),
+            # Block start
+            (r'(%s)(\s*)({)' %
+             r'|'.join(('data', r'transformed\s+?data',
+                        'parameters', r'transformed\s+parameters',
+                        'model', r'generated\s+quantities')),
+             bygroups(Keyword.Namespace, Text, Punctuation)),
+            # Reserved Words
+            (r'(%s)\b' % r'|'.join(_RESERVED), Keyword.Reserved),
+            # Data types
+            (r'(%s)\b' % r'|'.join(_TYPES), Keyword.Type),
+            # Punctuation
+            (r"[;:,\[\]()<>]", Punctuation),
+            # Builtin
+            (r'(%s)(?=\s*\()'
+             % r'|'.join(_stan_builtins.FUNCTIONS
+                         + _stan_builtins.DISTRIBUTIONS),
+             Name.Builtin),
+            (r'(%s)(?=\s*\()'
+             % r'|'.join(_stan_builtins.CONSTANTS), Keyword.Constant),
+            # Special names ending in __, like lp__
+            (r'[A-Za-z][A-Za-z0-9_]*__\b', Name.Builtin.Pseudo),
+            # Regular variable names
+            (r'[A-Za-z][A-Za-z0-9_]*\b', Name),
+            # Real Literals
+            (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
+            (r'-?[0-9]*\.[0-9]*', Number.Float),
+            # Integer Literals
+            (r'-?[0-9]+', Number.Integer),
+            # Assignment operators
+            # SLexer makes these tokens Operators.
+            (r'<-|~', Operator),
+            # Infix and prefix operators (and = )
+            (r"\+|-|\.?\*|\.?/|\\|'|=", Operator),
+            # Block delimiters
+            (r'[{}]', Punctuation),
+        ]
+    }
+
+    def analyse_text(text):
+        if re.search(r'^\s*parameters\s*\{', text, re.M):
+            return 1.0
+        else:
+            return 0.0
+
+
+class RdLexer(RegexLexer):
+    """
+    Pygments Lexer for R documentation (Rd) files
+
+    This is a very minimal implementation, highlighting little more
+    than the macros. A description of Rd syntax is found in `Writing R
+    Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
+    and `Parsing Rd files <developer.r-project.org/parseRd.pdf>`_.
+
+    *New in Pygments 1.6.*
+    """
+    name = 'Rd'
+    aliases = ['rd']
+    filenames = ['*.Rd']
+    mimetypes = ['text/x-r-doc']
+
+    # To account for verbatim / LaTeX-like / and R-like areas
+    # would require parsing.
+    tokens = {
+        'root' : [
+            # catch escaped brackets and percent sign
+            (r'\\[\\{}%]', String.Escape),
+            # comments
+            (r'%.*$', Comment),
+            # special macros with no arguments
+            (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
+            # macros
+            (r'\\[a-zA-Z]+\b', Keyword),
+            # special preprocessor macros
+            (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
+            # non-escaped brackets
+            (r'[{}]', Name.Builtin),
+            # everything else
+            (r'[^\\%\n{}]+', Text),
+            (r'.', Text),
+        ]
+    }