pygments.rb 2.2.0 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci.yml +1 -1
  3. data/CHANGELOG.adoc +5 -0
  4. data/bench.rb +2 -2
  5. data/lib/pygments/lexer.rb +1 -1
  6. data/lib/pygments/mentos.py +6 -4
  7. data/lib/pygments/popen.rb +14 -7
  8. data/lib/pygments/version.rb +1 -1
  9. data/lib/pygments.rb +8 -2
  10. data/pygments.rb.gemspec +4 -4
  11. data/test/test_pygments.rb +13 -7
  12. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/AUTHORS +8 -1
  13. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/INSTALLER +0 -0
  14. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/LICENSE +0 -0
  15. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/METADATA +1 -1
  16. data/vendor/pygments-main/Pygments-2.10.0.dist-info/RECORD +524 -0
  17. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/REQUESTED +0 -0
  18. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/WHEEL +0 -0
  19. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/entry_points.txt +0 -0
  20. data/vendor/pygments-main/{Pygments-2.8.1.dist-info → Pygments-2.10.0.dist-info}/top_level.txt +0 -0
  21. data/vendor/pygments-main/bin/pygmentize +1 -1
  22. data/vendor/pygments-main/pygments/__init__.py +1 -1
  23. data/vendor/pygments-main/pygments/formatters/_mapping.py +1 -0
  24. data/vendor/pygments-main/pygments/formatters/html.py +20 -7
  25. data/vendor/pygments-main/pygments/formatters/pangomarkup.py +83 -0
  26. data/vendor/pygments-main/pygments/lexers/_csound_builtins.py +55 -14
  27. data/vendor/pygments-main/pygments/lexers/_julia_builtins.py +401 -0
  28. data/vendor/pygments-main/pygments/lexers/_mapping.py +68 -52
  29. data/vendor/pygments-main/pygments/lexers/actionscript.py +2 -2
  30. data/vendor/pygments-main/pygments/lexers/ambient.py +1 -1
  31. data/vendor/pygments-main/pygments/lexers/amdgpu.py +9 -4
  32. data/vendor/pygments-main/pygments/lexers/apdlexer.py +448 -0
  33. data/vendor/pygments-main/pygments/lexers/apl.py +6 -3
  34. data/vendor/pygments-main/pygments/lexers/asc.py +51 -0
  35. data/vendor/pygments-main/pygments/lexers/asm.py +86 -58
  36. data/vendor/pygments-main/pygments/lexers/automation.py +1 -1
  37. data/vendor/pygments-main/pygments/lexers/bibtex.py +2 -2
  38. data/vendor/pygments-main/pygments/lexers/c_cpp.py +60 -33
  39. data/vendor/pygments-main/pygments/lexers/c_like.py +102 -3
  40. data/vendor/pygments-main/pygments/lexers/cddl.py +2 -2
  41. data/vendor/pygments-main/pygments/lexers/chapel.py +53 -29
  42. data/vendor/pygments-main/pygments/lexers/clean.py +6 -6
  43. data/vendor/pygments-main/pygments/lexers/configs.py +133 -43
  44. data/vendor/pygments-main/pygments/lexers/csound.py +2 -2
  45. data/vendor/pygments-main/pygments/lexers/devicetree.py +1 -1
  46. data/vendor/pygments-main/pygments/lexers/dotnet.py +1 -1
  47. data/vendor/pygments-main/pygments/lexers/erlang.py +1 -1
  48. data/vendor/pygments-main/pygments/lexers/floscript.py +1 -1
  49. data/vendor/pygments-main/pygments/lexers/futhark.py +5 -4
  50. data/vendor/pygments-main/pygments/lexers/gcodelexer.py +36 -0
  51. data/vendor/pygments-main/pygments/lexers/go.py +1 -1
  52. data/vendor/pygments-main/pygments/lexers/graphics.py +1 -1
  53. data/vendor/pygments-main/pygments/lexers/graphviz.py +5 -4
  54. data/vendor/pygments-main/pygments/lexers/gsql.py +92 -0
  55. data/vendor/pygments-main/pygments/lexers/haskell.py +4 -4
  56. data/vendor/pygments-main/pygments/lexers/haxe.py +1 -1
  57. data/vendor/pygments-main/pygments/lexers/installers.py +2 -2
  58. data/vendor/pygments-main/pygments/lexers/javascript.py +145 -127
  59. data/vendor/pygments-main/pygments/lexers/jslt.py +94 -0
  60. data/vendor/pygments-main/pygments/lexers/julia.py +135 -183
  61. data/vendor/pygments-main/pygments/lexers/jvm.py +262 -202
  62. data/vendor/pygments-main/pygments/lexers/kuin.py +299 -0
  63. data/vendor/pygments-main/pygments/lexers/lisp.py +18 -15
  64. data/vendor/pygments-main/pygments/lexers/markup.py +3 -3
  65. data/vendor/pygments-main/pygments/lexers/matlab.py +23 -10
  66. data/vendor/pygments-main/pygments/lexers/meson.py +155 -0
  67. data/vendor/pygments-main/pygments/lexers/mime.py +6 -21
  68. data/vendor/pygments-main/pygments/lexers/ml.py +1 -1
  69. data/vendor/pygments-main/pygments/lexers/nimrod.py +1 -1
  70. data/vendor/pygments-main/pygments/lexers/objective.py +3 -3
  71. data/vendor/pygments-main/pygments/lexers/parsers.py +1 -1
  72. data/vendor/pygments-main/pygments/lexers/procfile.py +43 -0
  73. data/vendor/pygments-main/pygments/lexers/promql.py +1 -1
  74. data/vendor/pygments-main/pygments/lexers/python.py +19 -5
  75. data/vendor/pygments-main/pygments/lexers/resource.py +1 -1
  76. data/vendor/pygments-main/pygments/lexers/rnc.py +1 -1
  77. data/vendor/pygments-main/pygments/lexers/ruby.py +1 -1
  78. data/vendor/pygments-main/pygments/lexers/rust.py +9 -10
  79. data/vendor/pygments-main/pygments/lexers/scripting.py +7 -7
  80. data/vendor/pygments-main/pygments/lexers/shell.py +11 -8
  81. data/vendor/pygments-main/pygments/lexers/smithy.py +79 -0
  82. data/vendor/pygments-main/pygments/lexers/smv.py +1 -1
  83. data/vendor/pygments-main/pygments/lexers/special.py +15 -2
  84. data/vendor/pygments-main/pygments/lexers/supercollider.py +1 -1
  85. data/vendor/pygments-main/pygments/lexers/tcl.py +3 -3
  86. data/vendor/pygments-main/pygments/lexers/teal.py +88 -0
  87. data/vendor/pygments-main/pygments/lexers/templates.py +19 -19
  88. data/vendor/pygments-main/pygments/lexers/teraterm.py +1 -1
  89. data/vendor/pygments-main/pygments/lexers/testing.py +1 -1
  90. data/vendor/pygments-main/pygments/lexers/theorem.py +4 -2
  91. data/vendor/pygments-main/pygments/lexers/thingsdb.py +118 -0
  92. data/vendor/pygments-main/pygments/lexers/tnt.py +25 -15
  93. data/vendor/pygments-main/pygments/lexers/trafficscript.py +1 -1
  94. data/vendor/pygments-main/pygments/lexers/webassembly.py +120 -0
  95. data/vendor/pygments-main/pygments/regexopt.py +1 -1
  96. data/vendor/pygments-main/pygments/style.py +1 -1
  97. data/vendor/pygments-main/pygments/styles/__init__.py +2 -0
  98. data/vendor/pygments-main/pygments/styles/friendly.py +1 -0
  99. data/vendor/pygments-main/pygments/styles/gruvbox.py +107 -0
  100. data/vendor/pygments-main/pygments/styles/native.py +1 -0
  101. metadata +29 -14
  102. data/vendor/pygments-main/Pygments-2.8.1.dist-info/RECORD +0 -494
@@ -267,11 +267,11 @@ class VelocityLexer(RegexLexer):
267
267
 
268
268
  def analyse_text(text):
269
269
  rv = 0.0
270
- if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
270
+ if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
271
271
  rv += 0.25
272
- if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
272
+ if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
273
273
  rv += 0.15
274
- if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
274
+ if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
275
275
  rv += 0.15
276
276
  if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
277
277
  r'(\.\w+(\([^)]*\))?)*\}?', text):
@@ -489,7 +489,7 @@ class MyghtyJavascriptLexer(DelegatingLexer):
489
489
  """
490
490
 
491
491
  name = 'JavaScript+Myghty'
492
- aliases = ['js+myghty', 'javascript+myghty']
492
+ aliases = ['javascript+myghty', 'js+myghty']
493
493
  mimetypes = ['application/x-javascript+myghty',
494
494
  'text/x-javascript+myghty',
495
495
  'text/javascript+mygthy']
@@ -588,12 +588,12 @@ class MakoLexer(RegexLexer):
588
588
  tokens = {
589
589
  'root': [
590
590
  (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
591
- bygroups(Text, Comment.Preproc, Keyword, Other)),
591
+ bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
592
592
  (r'(\s*)(%)([^\n]*)(\n|\Z)',
593
- bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
593
+ bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
594
594
  (r'(\s*)(##[^\n]*)(\n|\Z)',
595
- bygroups(Text, Comment.Preproc, Other)),
596
- (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
595
+ bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
596
+ (r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
597
597
  (r'(<%)([\w.:]+)',
598
598
  bygroups(Comment.Preproc, Name.Builtin), 'tag'),
599
599
  (r'(</%)([\w.:]+)(>)',
@@ -679,7 +679,7 @@ class MakoJavascriptLexer(DelegatingLexer):
679
679
  """
680
680
 
681
681
  name = 'JavaScript+Mako'
682
- aliases = ['js+mako', 'javascript+mako']
682
+ aliases = ['javascript+mako', 'js+mako']
683
683
  mimetypes = ['application/x-javascript+mako',
684
684
  'text/x-javascript+mako',
685
685
  'text/javascript+mako']
@@ -798,8 +798,8 @@ class CheetahJavascriptLexer(DelegatingLexer):
798
798
  """
799
799
 
800
800
  name = 'JavaScript+Cheetah'
801
- aliases = ['js+cheetah', 'javascript+cheetah',
802
- 'js+spitfire', 'javascript+spitfire']
801
+ aliases = ['javascript+cheetah', 'js+cheetah',
802
+ 'javascript+spitfire', 'js+spitfire']
803
803
  mimetypes = ['application/x-javascript+cheetah',
804
804
  'text/x-javascript+cheetah',
805
805
  'text/javascript+cheetah',
@@ -1026,7 +1026,7 @@ class XmlErbLexer(DelegatingLexer):
1026
1026
  """
1027
1027
 
1028
1028
  name = 'XML+Ruby'
1029
- aliases = ['xml+erb', 'xml+ruby']
1029
+ aliases = ['xml+ruby', 'xml+erb']
1030
1030
  alias_filenames = ['*.xml']
1031
1031
  mimetypes = ['application/xml+ruby']
1032
1032
 
@@ -1046,7 +1046,7 @@ class CssErbLexer(DelegatingLexer):
1046
1046
  """
1047
1047
 
1048
1048
  name = 'CSS+Ruby'
1049
- aliases = ['css+erb', 'css+ruby']
1049
+ aliases = ['css+ruby', 'css+erb']
1050
1050
  alias_filenames = ['*.css']
1051
1051
  mimetypes = ['text/css+ruby']
1052
1052
 
@@ -1064,7 +1064,7 @@ class JavascriptErbLexer(DelegatingLexer):
1064
1064
  """
1065
1065
 
1066
1066
  name = 'JavaScript+Ruby'
1067
- aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
1067
+ aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
1068
1068
  alias_filenames = ['*.js']
1069
1069
  mimetypes = ['application/x-javascript+ruby',
1070
1070
  'text/x-javascript+ruby',
@@ -1147,7 +1147,7 @@ class JavascriptPhpLexer(DelegatingLexer):
1147
1147
  """
1148
1148
 
1149
1149
  name = 'JavaScript+PHP'
1150
- aliases = ['js+php', 'javascript+php']
1150
+ aliases = ['javascript+php', 'js+php']
1151
1151
  alias_filenames = ['*.js']
1152
1152
  mimetypes = ['application/x-javascript+php',
1153
1153
  'text/x-javascript+php',
@@ -1229,7 +1229,7 @@ class JavascriptSmartyLexer(DelegatingLexer):
1229
1229
  """
1230
1230
 
1231
1231
  name = 'JavaScript+Smarty'
1232
- aliases = ['js+smarty', 'javascript+smarty']
1232
+ aliases = ['javascript+smarty', 'js+smarty']
1233
1233
  alias_filenames = ['*.js', '*.tpl']
1234
1234
  mimetypes = ['application/x-javascript+smarty',
1235
1235
  'text/x-javascript+smarty',
@@ -1311,8 +1311,8 @@ class JavascriptDjangoLexer(DelegatingLexer):
1311
1311
  """
1312
1312
 
1313
1313
  name = 'JavaScript+Django/Jinja'
1314
- aliases = ['js+django', 'javascript+django',
1315
- 'js+jinja', 'javascript+jinja']
1314
+ aliases = ['javascript+django', 'js+django',
1315
+ 'javascript+jinja', 'js+jinja']
1316
1316
  alias_filenames = ['*.js']
1317
1317
  mimetypes = ['application/x-javascript+django',
1318
1318
  'application/x-javascript+jinja',
@@ -1750,7 +1750,7 @@ class LassoJavascriptLexer(DelegatingLexer):
1750
1750
  """
1751
1751
 
1752
1752
  name = 'JavaScript+Lasso'
1753
- aliases = ['js+lasso', 'javascript+lasso']
1753
+ aliases = ['javascript+lasso', 'js+lasso']
1754
1754
  alias_filenames = ['*.js']
1755
1755
  mimetypes = ['application/x-javascript+lasso',
1756
1756
  'text/x-javascript+lasso',
@@ -24,7 +24,7 @@ class TeraTermLexer(RegexLexer):
24
24
  .. versionadded:: 2.4
25
25
  """
26
26
  name = 'Tera Term macro'
27
- aliases = ['ttl', 'teraterm', 'teratermmacro']
27
+ aliases = ['teratermmacro', 'teraterm', 'ttl']
28
28
  filenames = ['*.ttl']
29
29
  mimetypes = ['text/x-teratermmacro']
30
30
 
@@ -21,7 +21,7 @@ class GherkinLexer(RegexLexer):
21
21
  .. versionadded:: 1.2
22
22
  """
23
23
  name = 'Gherkin'
24
- aliases = ['cucumber', 'gherkin']
24
+ aliases = ['gherkin', 'cucumber']
25
25
  filenames = ['*.feature']
26
26
  mimetypes = ['text/x-gherkin']
27
27
 
@@ -43,6 +43,7 @@ class CoqLexer(RegexLexer):
43
43
  'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
44
44
  'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
45
45
  'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
46
+ 'Abort', 'Admitted',
46
47
  'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
47
48
  'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
48
49
  'Universe', 'Polymorphic', 'Monomorphic', 'Context'
@@ -55,7 +56,7 @@ class CoqLexer(RegexLexer):
55
56
  )
56
57
  keywords3 = (
57
58
  # Sorts
58
- 'Type', 'Prop',
59
+ 'Type', 'Prop', 'SProp',
59
60
  )
60
61
  keywords4 = (
61
62
  # Tactics
@@ -94,7 +95,8 @@ class CoqLexer(RegexLexer):
94
95
  '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
95
96
  r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
96
97
  r'/\\', r'\\/', r'\{\|', r'\|\}',
97
- 'Π', 'λ',
98
+ # 'Π', 'Σ', # Not defined in the standard library
99
+ 'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥',
98
100
  )
99
101
  operators = r'[!$%&*+\./:<=>?@^|~-]'
100
102
  prefix_syms = r'[!?~]'
@@ -0,0 +1,118 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.thingsdb
4
+ ~~~~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for the ThingsDB language.
7
+
8
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
9
+ :license: BSD, see LICENSE for details.
10
+ """
11
+
12
+ import re
13
+ from pygments.lexer import RegexLexer, include, bygroups
14
+ from pygments.token import Comment, Keyword, Name, Number, String, Text, \
15
+ Operator, Punctuation, Whitespace
16
+
17
+ __all__ = ['ThingsDBLexer']
18
+
19
+
20
+ class ThingsDBLexer(RegexLexer):
21
+ """
22
+ Lexer for the ThingsDB programming language.
23
+
24
+ .. versionadded:: 2.9
25
+ """
26
+ name = 'ThingsDB'
27
+ aliases = ['ti', 'thingsdb']
28
+ filenames = ['*.ti']
29
+
30
+ tokens = {
31
+ 'root': [
32
+ include('expression'),
33
+ ],
34
+ 'expression': [
35
+ include('comments'),
36
+ include('whitespace'),
37
+
38
+ # numbers
39
+ (r'[-+]?0b[01]+', Number.Bin),
40
+ (r'[-+]?0o[0-8]+', Number.Oct),
41
+ (r'([-+]?0x[0-9a-fA-F]+)', Number.Hex),
42
+ (r'[-+]?[0-9]+', Number.Integer),
43
+ (r'[-+]?((inf|nan)([^0-9A-Za-z_]|$)|[0-9]*\.[0-9]+(e[+-][0-9]+)?)',
44
+ Number.Float),
45
+
46
+ # strings
47
+ (r'(?:"(?:[^"]*)")+', String.Double),
48
+ (r"(?:'(?:[^']*)')+", String.Single),
49
+
50
+ # literals
51
+ (r'(true|false|nil)\b', Keyword.Constant),
52
+
53
+ # regular expressions
54
+ (r'(/[^/\\]*(?:\\.[^/\\]*)*/i?)', String.Regex),
55
+
56
+ # thing id's
57
+ (r'#[0-9]+', Comment.Preproc),
58
+
59
+ # name, assignments and functions
60
+ include('names'),
61
+
62
+ (r'[(){}\[\],;]', Punctuation),
63
+ (r'[+\-*/%&|<>^!~@=:?]', Operator),
64
+ ],
65
+ 'names': [
66
+ (r'(\.)'
67
+ r'(add|call|contains|del|endswith|extend|filter|find|findindex|'
68
+ r'get|has|id|indexof|keys|len|lower|map|pop|push|remove|set|sort|'
69
+ r'splice|startswith|test|unwrap|upper|values|wrap)'
70
+ r'(\()',
71
+ bygroups(Name.Function, Name.Function, Punctuation), 'arguments'),
72
+ (r'(array|assert|assert_err|auth_err|backup_info|backups_info|'
73
+ r'bad_data_err|bool|closure|collection_info|collections_info|'
74
+ r'counters|deep|del_backup|del_collection|del_expired|del_node|'
75
+ r'del_procedure|del_token|del_type|del_user|err|float|'
76
+ r'forbidden_err|grant|int|isarray|isascii|isbool|isbytes|iserr|'
77
+ r'isfloat|isinf|isint|islist|isnan|isnil|israw|isset|isstr|'
78
+ r'isthing|istuple|isutf8|lookup_err|max_quota_err|mod_type|new|'
79
+ r'new_backup|new_collection|new_node|new_procedure|new_token|'
80
+ r'new_type|new_user|node_err|node_info|nodes_info|now|'
81
+ r'num_arguments_err|operation_err|overflow_err|procedure_doc|'
82
+ r'procedure_info|procedures_info|raise|refs|rename_collection|'
83
+ r'rename_user|reset_counters|return|revoke|run|set_log_level|set|'
84
+ r'set_quota|set_type|shutdown|str|syntax_err|thing|try|type|'
85
+ r'type_err|type_count|type_info|types_info|user_info|users_info|'
86
+ r'value_err|wse|zero_div_err)'
87
+ r'(\()',
88
+ bygroups(Name.Function, Punctuation),
89
+ 'arguments'),
90
+ (r'(\.[A-Za-z_][0-9A-Za-z_]*)'
91
+ r'(\s*)(=)',
92
+ bygroups(Name.Attribute, Text, Operator)),
93
+ (r'\.[A-Za-z_][0-9A-Za-z_]*', Name.Attribute),
94
+ (r'([A-Za-z_][0-9A-Za-z_]*)(\s*)(=)',
95
+ bygroups(Name.Variable, Text, Operator)),
96
+ (r'[A-Za-z_][0-9A-Za-z_]*', Name.Variable),
97
+ ],
98
+ 'whitespace': [
99
+ (r'\n', Whitespace),
100
+ (r'\s+', Whitespace),
101
+ ],
102
+ 'comments': [
103
+ (r'//(.*?)\n', Comment.Single),
104
+ (r'/\*', Comment.Multiline, 'comment'),
105
+ ],
106
+ 'comment': [
107
+ (r'[^*/]+', Comment.Multiline),
108
+ (r'/\*', Comment.Multiline, '#push'),
109
+ (r'\*/', Comment.Multiline, '#pop'),
110
+ (r'[*/]', Comment.Multiline),
111
+ ],
112
+ 'arguments': [
113
+ include('expression'),
114
+ (',', Punctuation),
115
+ (r'\(', Punctuation, '#push'),
116
+ (r'\)', Punctuation, '#pop'),
117
+ ],
118
+ }
@@ -12,7 +12,7 @@ import re
12
12
 
13
13
  from pygments.lexer import Lexer
14
14
  from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
15
- Punctuation, Error
15
+ Punctuation, Error
16
16
 
17
17
  __all__ = ['TNTLexer']
18
18
 
@@ -66,15 +66,16 @@ class TNTLexer(Lexer):
66
66
  end += 1
67
67
  except IndexError:
68
68
  end = len(text)
69
- if required:
70
- assert end != start
69
+ if required and end == start:
70
+ raise AssertionError
71
71
  if end != start:
72
72
  self.cur.append((start, Text, text[start:end]))
73
73
  return end
74
74
 
75
75
  def variable(self, start, text):
76
76
  """Tokenize a variable."""
77
- assert text[start] in self.VARIABLES
77
+ if text[start] not in self.VARIABLES:
78
+ raise AssertionError
78
79
  end = start+1
79
80
  while text[end] in self.PRIMES:
80
81
  end += 1
@@ -97,10 +98,12 @@ class TNTLexer(Lexer):
97
98
  if text[start] == '(': # (...+...)
98
99
  self.cur.append((start, Punctuation, text[start]))
99
100
  start = self.term(start+1, text)
100
- assert text[start] in self.OPERATORS
101
+ if text[start] not in self.OPERATORS:
102
+ raise AssertionError
101
103
  self.cur.append((start, Operator, text[start]))
102
104
  start = self.term(start+1, text)
103
- assert text[start] == ')'
105
+ if text[start] != ')':
106
+ raise AssertionError
104
107
  self.cur.append((start, Punctuation, text[start]))
105
108
  return start+1
106
109
  raise AssertionError # no matches
@@ -116,21 +119,25 @@ class TNTLexer(Lexer):
116
119
  if text[start] in self.QUANTIFIERS: # Aa:<...>
117
120
  self.cur.append((start, Keyword.Declaration, text[start]))
118
121
  start = self.variable(start+1, text)
119
- assert text[start] == ':'
122
+ if text[start] != ':':
123
+ raise AssertionError
120
124
  self.cur.append((start, Punctuation, text[start]))
121
125
  return self.formula(start+1, text)
122
126
  if text[start] == '<': # <...&...>
123
127
  self.cur.append((start, Punctuation, text[start]))
124
128
  start = self.formula(start+1, text)
125
- assert text[start] in self.LOGIC
129
+ if text[start] not in self.LOGIC:
130
+ raise AssertionError
126
131
  self.cur.append((start, Operator, text[start]))
127
132
  start = self.formula(start+1, text)
128
- assert text[start] == '>'
133
+ if text[start] != '>':
134
+ raise AssertionError
129
135
  self.cur.append((start, Punctuation, text[start]))
130
136
  return start+1
131
137
  # ...=...
132
138
  start = self.term(start, text)
133
- assert text[start] == '='
139
+ if text[start] != '=':
140
+ raise AssertionError
134
141
  self.cur.append((start, Operator, text[start]))
135
142
  start = self.term(start+1, text)
136
143
  return start
@@ -138,7 +145,8 @@ class TNTLexer(Lexer):
138
145
  def rule(self, start, text):
139
146
  """Tokenize a rule."""
140
147
  match = self.RULES.match(text, start)
141
- assert match is not None
148
+ if match is None:
149
+ raise AssertionError
142
150
  groups = sorted(match.regs[1:]) # exclude whole match
143
151
  for group in groups:
144
152
  if group[0] >= 0: # this group matched
@@ -162,8 +170,10 @@ class TNTLexer(Lexer):
162
170
  self.cur.append((start+1, Text, text[start+1:end]))
163
171
  start = end
164
172
  match = self.LINENOS.match(text, start)
165
- assert match is not None
166
- assert text[match.end()] == ')'
173
+ if match is None:
174
+ raise AssertionError
175
+ if text[match.end()] != ')':
176
+ raise AssertionError
167
177
  self.cur.append((match.start(), Number.Integer, match.group(0)))
168
178
  self.cur.append((match.end(), Punctuation, text[match.end()]))
169
179
  return match.end() + 1
@@ -219,7 +229,7 @@ class TNTLexer(Lexer):
219
229
  orig = len(self.cur)
220
230
  try:
221
231
  start = end = self.formula(start, text)
222
- except AssertionError: # not well-formed
232
+ except (AssertionError, RecursionError): # not well-formed
223
233
  del self.cur[orig:]
224
234
  while text[end] not in self.WHITESPACE:
225
235
  end += 1
@@ -257,6 +267,6 @@ class TNTLexer(Lexer):
257
267
  try:
258
268
  del self.cur[orig:]
259
269
  except NameError:
260
- pass # if orig was never defined, fine
270
+ pass # if orig was never defined, fine
261
271
  self.error_till_line_end(start, text)
262
272
  return self.cur
@@ -23,7 +23,7 @@ class RtsLexer(RegexLexer):
23
23
  .. versionadded:: 2.1
24
24
  """
25
25
  name = 'TrafficScript'
26
- aliases = ['rts','trafficscript']
26
+ aliases = ['trafficscript', 'rts']
27
27
  filenames = ['*.rts']
28
28
 
29
29
  tokens = {
@@ -0,0 +1,120 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ pygments.lexers.webassembly
4
+ ~~~~~~~~~~~~~~~~~~~
5
+
6
+ Lexers for the WebAssembly text format.
7
+
8
+ The grammar can be found at https://github.com/WebAssembly/spec/blob/master/interpreter/README.md
9
+ and https://webassembly.github.io/spec/core/text/.
10
+
11
+
12
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
13
+ :license: BSD, see LICENSE for details.
14
+ """
15
+
16
+ from pygments.lexer import RegexLexer, words, bygroups, default
17
+ from pygments.token import Text, Comment, Operator, Keyword, String, Number, Punctuation, Literal, Error, Name
18
+
19
+ __all__ = ['WatLexer']
20
+
21
+ keywords = (
22
+ 'module', 'import', 'func', 'funcref', 'start', 'param', 'local', 'type',
23
+ 'result', 'export', 'memory', 'global', 'mut', 'data', 'table', 'elem',
24
+ 'if', 'then', 'else', 'end', 'block', 'loop'
25
+ )
26
+
27
+ builtins = (
28
+ 'unreachable', 'nop', 'block', 'loop', 'if', 'else', 'end', 'br', 'br_if',
29
+ 'br_table', 'return', 'call', 'call_indirect', 'drop', 'select',
30
+ 'local.get', 'local.set', 'local.tee', 'global.get', 'global.set',
31
+ 'i32.load', 'i64.load', 'f32.load', 'f64.load', 'i32.load8_s',
32
+ 'i32.load8_u', 'i32.load16_s', 'i32.load16_u', 'i64.load8_s',
33
+ 'i64.load8_u', 'i64.load16_s', 'i64.load16_u', 'i64.load32_s',
34
+ 'i64.load32_u', 'i32.store', 'i64.store', 'f32.store', 'f64.store',
35
+ 'i32.store8', 'i32.store16', 'i64.store8', 'i64.store16', 'i64.store32',
36
+ 'memory.size', 'memory.grow', 'i32.const', 'i64.const', 'f32.const',
37
+ 'f64.const', 'i32.eqz', 'i32.eq', 'i32.ne', 'i32.lt_s', 'i32.lt_u',
38
+ 'i32.gt_s', 'i32.gt_u', 'i32.le_s', 'i32.le_u', 'i32.ge_s', 'i32.ge_u',
39
+ 'i64.eqz', 'i64.eq', 'i64.ne', 'i64.lt_s', 'i64.lt_u', 'i64.gt_s',
40
+ 'i64.gt_u', 'i64.le_s', 'i64.le_u', 'i64.ge_s', 'i64.ge_u', 'f32.eq',
41
+ 'f32.ne', 'f32.lt', 'f32.gt', 'f32.le', 'f32.ge', 'f64.eq', 'f64.ne',
42
+ 'f64.lt', 'f64.gt', 'f64.le', 'f64.ge', 'i32.clz', 'i32.ctz', 'i32.popcnt',
43
+ 'i32.add', 'i32.sub', 'i32.mul', 'i32.div_s', 'i32.div_u', 'i32.rem_s',
44
+ 'i32.rem_u', 'i32.and', 'i32.or', 'i32.xor', 'i32.shl', 'i32.shr_s',
45
+ 'i32.shr_u', 'i32.rotl', 'i32.rotr', 'i64.clz', 'i64.ctz', 'i64.popcnt',
46
+ 'i64.add', 'i64.sub', 'i64.mul', 'i64.div_s', 'i64.div_u', 'i64.rem_s',
47
+ 'i64.rem_u', 'i64.and', 'i64.or', 'i64.xor', 'i64.shl', 'i64.shr_s',
48
+ 'i64.shr_u', 'i64.rotl', 'i64.rotr', 'f32.abs', 'f32.neg', 'f32.ceil',
49
+ 'f32.floor', 'f32.trunc', 'f32.nearest', 'f32.sqrt', 'f32.add', 'f32.sub',
50
+ 'f32.mul', 'f32.div', 'f32.min', 'f32.max', 'f32.copysign', 'f64.abs',
51
+ 'f64.neg', 'f64.ceil', 'f64.floor', 'f64.trunc', 'f64.nearest', 'f64.sqrt',
52
+ 'f64.add', 'f64.sub', 'f64.mul', 'f64.div', 'f64.min', 'f64.max',
53
+ 'f64.copysign', 'i32.wrap_i64', 'i32.trunc_f32_s', 'i32.trunc_f32_u',
54
+ 'i32.trunc_f64_s', 'i32.trunc_f64_u', 'i64.extend_i32_s',
55
+ 'i64.extend_i32_u', 'i64.trunc_f32_s', 'i64.trunc_f32_u',
56
+ 'i64.trunc_f64_s', 'i64.trunc_f64_u', 'f32.convert_i32_s',
57
+ 'f32.convert_i32_u', 'f32.convert_i64_s', 'f32.convert_i64_u',
58
+ 'f32.demote_f64', 'f64.convert_i32_s', 'f64.convert_i32_u',
59
+ 'f64.convert_i64_s', 'f64.convert_i64_u', 'f64.promote_f32',
60
+ 'i32.reinterpret_f32', 'i64.reinterpret_f64', 'f32.reinterpret_i32',
61
+ 'f64.reinterpret_i64',
62
+ )
63
+
64
+
65
+ class WatLexer(RegexLexer):
66
+ """Lexer for the `WebAssembly text format <https://webassembly.org/>`_.
67
+
68
+ .. versionadded:: 2.9
69
+ """
70
+
71
+ name = 'WebAssembly'
72
+ aliases = ['wast', 'wat']
73
+ filenames = ['*.wat', '*.wast']
74
+
75
+ tokens = {
76
+ 'root': [
77
+ (words(keywords, suffix=r'(?=[^a-z_\.])'), Keyword),
78
+ (words(builtins), Name.Builtin, 'arguments'),
79
+ (words(['i32', 'i64', 'f32', 'f64']), Keyword.Type),
80
+ (r'\$[A-Za-z0-9!#$%&\'*+./:<=>?@\\^_`|~-]+', Name.Variable), # yes, all of these are valid in identifiers
81
+ (r';;.*?$', Comment.Single),
82
+ (r'\(;', Comment.Multiline, 'nesting_comment'),
83
+ (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*(.([\dA-Fa-f](_?[\dA-Fa-f])*)?)?([pP][+-]?[\dA-Fa-f](_?[\dA-Fa-f])*)?', Number.Float),
84
+ (r'[+-]?\d.\d(_?\d)*[eE][+-]?\d(_?\d)*', Number.Float),
85
+ (r'[+-]?\d.\d(_?\d)*', Number.Float),
86
+ (r'[+-]?\d.[eE][+-]?\d(_?\d)*', Number.Float),
87
+ (r'[+-]?(inf|nan:0x[\dA-Fa-f](_?[\dA-Fa-f])*|nan)', Number.Float),
88
+ (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*', Number.Hex),
89
+ (r'[+-]?\d(_?\d)*', Number.Integer),
90
+ (r'[\(\)]', Punctuation),
91
+ (r'"', String.Double, 'string'),
92
+ (r'\s+', Text),
93
+ ],
94
+ 'nesting_comment': [
95
+ (r'\(;', Comment.Multiline, '#push'),
96
+ (r';\)', Comment.Multiline, '#pop'),
97
+ (r'[^;(]+', Comment.Multiline),
98
+ (r'[;(]', Comment.Multiline),
99
+ ],
100
+ 'string': [
101
+ (r'\\[\dA-Fa-f][\dA-Fa-f]', String.Escape), # must have exactly two hex digits
102
+ (r'\\t', String.Escape),
103
+ (r'\\n', String.Escape),
104
+ (r'\\r', String.Escape),
105
+ (r'\\"', String.Escape),
106
+ (r"\\'", String.Escape),
107
+ (r'\\u\{[\dA-Fa-f](_?[\dA-Fa-f])*\}', String.Escape),
108
+ (r'\\\\', String.Escape),
109
+ (r'"', String.Double, '#pop'),
110
+ (r'[^"\\]+', String.Double),
111
+ ],
112
+ 'arguments': [
113
+ (r'\s+', Text),
114
+ (r'(offset)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
115
+ (r'(offset)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
116
+ (r'(align)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
117
+ (r'(align)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
118
+ default('#pop'),
119
+ ]
120
+ }
@@ -15,7 +15,7 @@ from os.path import commonprefix
15
15
  from itertools import groupby
16
16
  from operator import itemgetter
17
17
 
18
- CS_ESCAPE = re.compile(r'[\^\\\-\]]')
18
+ CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
19
19
  FIRST_ELEMENT = itemgetter(0)
20
20
 
21
21
 
@@ -134,7 +134,7 @@ class StyleMeta(type):
134
134
  color = _ansimap[color]
135
135
  bgcolor = t[4]
136
136
  if bgcolor in _deprecated_ansicolors:
137
- bgcolor = _deprecated_ansicolors[color]
137
+ bgcolor = _deprecated_ansicolors[bgcolor]
138
138
  if bgcolor in ansicolors:
139
139
  bgansicolor = bgcolor
140
140
  bgcolor = _ansimap[bgcolor]
@@ -52,6 +52,8 @@ STYLE_MAP = {
52
52
  'stata-dark': 'stata_dark::StataDarkStyle',
53
53
  'inkpot': 'inkpot::InkPotStyle',
54
54
  'zenburn': 'zenburn::ZenburnStyle',
55
+ 'gruvbox-dark': 'gruvbox::GruvboxDarkStyle',
56
+ 'gruvbox-light': 'gruvbox::GruvboxLightStyle',
55
57
  }
56
58
 
57
59
 
@@ -20,6 +20,7 @@ class FriendlyStyle(Style):
20
20
 
21
21
  background_color = "#f0f0f0"
22
22
  default_style = ""
23
+ line_number_color = "#666666"
23
24
 
24
25
  styles = {
25
26
  Whitespace: "#bbbbbb",