pygments.rb 0.3.2 → 0.3.3

Files changed (70)
  1. data/README.md +6 -1
  2. data/lexers +0 -0
  3. data/lib/pygments/version.rb +1 -1
  4. data/vendor/pygments-main/AUTHORS +15 -0
  5. data/vendor/pygments-main/CHANGES +28 -1
  6. data/vendor/pygments-main/LICENSE +1 -1
  7. data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +121 -0
  8. data/vendor/pygments-main/pygments/cmdline.py +1 -1
  9. data/vendor/pygments-main/pygments/filters/__init__.py +0 -1
  10. data/vendor/pygments-main/pygments/formatters/_mapping.py +2 -2
  11. data/vendor/pygments-main/pygments/formatters/img.py +1 -1
  12. data/vendor/pygments-main/pygments/formatters/latex.py +8 -8
  13. data/vendor/pygments-main/pygments/formatters/other.py +0 -2
  14. data/vendor/pygments-main/pygments/lexers/_lassobuiltins.py +5413 -0
  15. data/vendor/pygments-main/pygments/lexers/_mapping.py +36 -11
  16. data/vendor/pygments-main/pygments/lexers/_openedgebuiltins.py +551 -0
  17. data/vendor/pygments-main/pygments/lexers/_postgres_builtins.py +0 -1
  18. data/vendor/pygments-main/pygments/lexers/_robotframeworklexer.py +546 -0
  19. data/vendor/pygments-main/pygments/lexers/_sourcemodbuiltins.py +1072 -0
  20. data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +174 -0
  21. data/vendor/pygments-main/pygments/lexers/_vimbuiltins.py +13 -3
  22. data/vendor/pygments-main/pygments/lexers/agile.py +145 -33
  23. data/vendor/pygments-main/pygments/lexers/asm.py +2 -2
  24. data/vendor/pygments-main/pygments/lexers/compiled.py +328 -36
  25. data/vendor/pygments-main/pygments/lexers/dalvik.py +104 -0
  26. data/vendor/pygments-main/pygments/lexers/dotnet.py +8 -14
  27. data/vendor/pygments-main/pygments/lexers/functional.py +773 -8
  28. data/vendor/pygments-main/pygments/lexers/jvm.py +184 -36
  29. data/vendor/pygments-main/pygments/lexers/math.py +349 -23
  30. data/vendor/pygments-main/pygments/lexers/other.py +315 -492
  31. data/vendor/pygments-main/pygments/lexers/parsers.py +83 -1
  32. data/vendor/pygments-main/pygments/lexers/shell.py +4 -1
  33. data/vendor/pygments-main/pygments/lexers/templates.py +112 -2
  34. data/vendor/pygments-main/pygments/lexers/text.py +52 -3
  35. data/vendor/pygments-main/pygments/lexers/web.py +382 -36
  36. data/vendor/pygments-main/pygments/unistring.py +35 -25
  37. data/vendor/pygments-main/pygments/util.py +45 -0
  38. data/vendor/pygments-main/tests/examplefiles/Config.in.cache +1973 -0
  39. data/vendor/pygments-main/tests/examplefiles/example.Rd +78 -0
  40. data/vendor/pygments-main/tests/examplefiles/example.bug +54 -0
  41. data/vendor/pygments-main/tests/examplefiles/example.ceylon +33 -0
  42. data/vendor/pygments-main/tests/examplefiles/example.jag +48 -0
  43. data/vendor/pygments-main/tests/examplefiles/example.monkey +152 -0
  44. data/vendor/pygments-main/tests/examplefiles/example.msc +43 -0
  45. data/vendor/pygments-main/tests/examplefiles/example.reg +19 -0
  46. data/vendor/pygments-main/tests/examplefiles/example.rkt +95 -0
  47. data/vendor/pygments-main/tests/examplefiles/example.rpf +4 -0
  48. data/vendor/pygments-main/tests/examplefiles/example.stan +97 -0
  49. data/vendor/pygments-main/tests/examplefiles/example.xtend +34 -0
  50. data/vendor/pygments-main/tests/examplefiles/example2.msc +79 -0
  51. data/vendor/pygments-main/tests/examplefiles/garcia-wachs.kk +123 -0
  52. data/vendor/pygments-main/tests/examplefiles/hello.smali +40 -0
  53. data/vendor/pygments-main/tests/examplefiles/hello.sp +9 -0
  54. data/vendor/pygments-main/tests/examplefiles/http_request_example +2 -1
  55. data/vendor/pygments-main/tests/examplefiles/http_response_example +4 -2
  56. data/vendor/pygments-main/tests/examplefiles/inet_pton6.dg +71 -0
  57. data/vendor/pygments-main/tests/examplefiles/json.lasso +301 -0
  58. data/vendor/pygments-main/tests/examplefiles/json.lasso9 +213 -0
  59. data/vendor/pygments-main/tests/examplefiles/livescript-demo.ls +41 -0
  60. data/vendor/pygments-main/tests/examplefiles/matlab_sample +5 -2
  61. data/vendor/pygments-main/tests/examplefiles/metagrammar.treetop +455 -0
  62. data/vendor/pygments-main/tests/examplefiles/pytb_test3.pytb +4 -0
  63. data/vendor/pygments-main/tests/examplefiles/robotframework.txt +39 -0
  64. data/vendor/pygments-main/tests/examplefiles/rust_example.rs +743 -0
  65. data/vendor/pygments-main/tests/examplefiles/test.R +149 -115
  66. data/vendor/pygments-main/tests/examplefiles/test.cu +36 -0
  67. data/vendor/pygments-main/tests/test_basic_api.py +1 -1
  68. data/vendor/pygments-main/tests/test_util.py +18 -0
  69. metadata +34 -3
  70. data/vendor/pygments-main/REVISION +0 -1
data/vendor/pygments-main/pygments/lexers/parsers.py
@@ -28,7 +28,8 @@ __all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
            'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
            #'AntlrCLexer',
            'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
-           'AntlrJavaLexer', "AntlrActionScriptLexer"]
+           'AntlrJavaLexer', "AntlrActionScriptLexer",
+           'TreetopLexer']
 
 
 class RagelLexer(RegexLexer):
@@ -693,3 +694,84 @@ class AntlrActionScriptLexer(DelegatingLexer):
     def analyse_text(text):
         return AntlrLexer.analyse_text(text) and \
             re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+
+class TreetopBaseLexer(RegexLexer):
+    """
+    A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
+    Not for direct use; use TreetopLexer instead.
+
+    *New in Pygments 1.6.*
+    """
+
+    tokens = {
+        'root': [
+            include('space'),
+            (r'require[ \t]+[^\n\r]+[\n\r]', Other),
+            (r'module\b', Keyword.Namespace, 'module'),
+            (r'grammar\b', Keyword, 'grammar'),
+        ],
+        'module': [
+            include('space'),
+            include('end'),
+            (r'module\b', Keyword, '#push'),
+            (r'grammar\b', Keyword, 'grammar'),
+            (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Namespace),
+        ],
+        'grammar': [
+            include('space'),
+            include('end'),
+            (r'rule\b', Keyword, 'rule'),
+            (r'include\b', Keyword, 'include'),
+            (r'[A-Z][A-Za-z_0-9]*', Name),
+        ],
+        'include': [
+            include('space'),
+            (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Class, '#pop'),
+        ],
+        'rule': [
+            include('space'),
+            include('end'),
+            (r'"(\\\\|\\"|[^"])*"', String.Double),
+            (r"'(\\\\|\\'|[^'])*'", String.Single),
+            (r'([A-Za-z_][A-Za-z_0-9]*)(:)', bygroups(Name.Label, Punctuation)),
+            (r'[A-Za-z_][A-Za-z_0-9]*', Name),
+            (r'[()]', Punctuation),
+            (r'[?+*/&!~]', Operator),
+            (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
+            (r'([0-9]*)(\.\.)([0-9]*)', bygroups(Number.Integer, Operator, Number.Integer)),
+            (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
+            (r'{', Punctuation, 'inline_module'),
+            (r'\.', String.Regex),
+        ],
+        'inline_module': [
+            (r'{', Other, 'ruby'),
+            (r'}', Punctuation, '#pop'),
+            (r'[^{}]+', Other),
+        ],
+        'ruby': [
+            (r'{', Other, '#push'),
+            (r'}', Other, '#pop'),
+            (r'[^{}]+', Other),
+        ],
+        'space': [
+            (r'[ \t\n\r]+', Whitespace),
+            (r'#[^\n]*', Comment.Single),
+        ],
+        'end': [
+            (r'end\b', Keyword, '#pop'),
+        ],
+    }
+
+class TreetopLexer(DelegatingLexer):
+    """
+    A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'Treetop'
+    aliases = ['treetop']
+    filenames = ['*.treetop', '*.tt']
+
+    def __init__(self, **options):
+        super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options)
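The new Treetop lexer is exported and registered under the treetop alias and the *.treetop/*.tt filename patterns, so it is reachable through the normal Pygments entry points. A quick, illustrative sketch (not part of the diff; the grammar fragment is made up and assumes the vendored Pygments 1.6 sources are importable):

    # Sketch: highlight a small Treetop grammar with the newly added lexer.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    grammar = '''grammar Arithmetic
      rule number
        [0-9]+ { def value; text_value.to_i; end }
      end
    end
    '''

    print(highlight(grammar, get_lexer_by_name('treetop'), TerminalFormatter()))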
data/vendor/pygments-main/pygments/lexers/shell.py
@@ -31,7 +31,7 @@ class BashLexer(RegexLexer):
     """
 
     name = 'Bash'
-    aliases = ['bash', 'sh', 'ksh', 'shell']
+    aliases = ['bash', 'sh', 'ksh']
     filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
                  '.bashrc', 'bashrc', '.bash_*', 'bash_*']
     mimetypes = ['application/x-sh', 'application/x-shellscript']
@@ -60,6 +60,7 @@ class BashLexer(RegexLexer):
             (r'\\[\w\W]', String.Escape),
             (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
             (r'[\[\]{}()=]', Operator),
+            (r'<<<', Operator),  # here-string
             (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
             (r'&&|\|\|', Operator),
         ],
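The added rule targets Bash here-strings; a minimal, illustrative check (not part of the diff):

    # Sketch: the <<< here-string operator should now be emitted as an Operator token.
    from pygments.lexers.shell import BashLexer

    for token, value in BashLexer().get_tokens('grep foo <<< "foo bar"\n'):
        print(token, repr(value))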
@@ -328,6 +329,8 @@ class PowerShellLexer(RegexLexer):
             (r'(&lt;|<)#', Comment.Multiline, 'multline'),
             (r'@"\n.*?\n"@', String.Heredoc),
             (r"@'\n.*?\n'@", String.Heredoc),
+            # escaped syntax
+            (r'`[\'"$@-]', Punctuation),
             (r'"', String.Double, 'string'),
             (r"'([^']|'')*'", String.Single),
             (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
data/vendor/pygments-main/pygments/lexers/templates.py
@@ -12,7 +12,7 @@
 import re
 
 from pygments.lexers.web import \
-     PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
+     PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer
 from pygments.lexers.agile import PythonLexer, PerlLexer
 from pygments.lexers.compiled import JavaLexer
 from pygments.lexers.jvm import TeaLangLexer
@@ -37,7 +37,8 @@ __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
            'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
            'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
            'ColdfusionHtmlLexer', 'VelocityLexer', 'VelocityHtmlLexer',
-           'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer']
+           'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer', 'LassoHtmlLexer',
+           'LassoXmlLexer', 'LassoCssLexer', 'LassoJavascriptLexer']
 
 
 class ErbLexer(Lexer):
@@ -1629,3 +1630,112 @@ class TeaTemplateLexer(DelegatingLexer):
         if '<%' in text and '%>' in text:
             rv += 0.1
         return rv
+
+
+class LassoHtmlLexer(DelegatingLexer):
+    """
+    Subclass of the `LassoLexer` which highlights unhandled data with the
+    `HtmlLexer`.
+
+    Nested JavaScript and CSS is also highlighted.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'HTML+Lasso'
+    aliases = ['html+lasso']
+    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
+                       '*.incl', '*.inc', '*.las']
+    mimetypes = ['text/html+lasso',
+                 'application/x-httpd-lasso',
+                 'application/x-httpd-lasso[89]']
+
+    def __init__(self, **options):
+        options['requiredelimiters'] = True
+        super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
+
+    def analyse_text(text):
+        rv = LassoLexer.analyse_text(text)
+        if re.search(r'<\w+>', text, re.I):
+            rv += 0.2
+        if html_doctype_matches(text):
+            rv += 0.5
+        return rv
+
+
+class LassoXmlLexer(DelegatingLexer):
+    """
+    Subclass of the `LassoLexer` which highlights unhandled data with the
+    `XmlLexer`.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'XML+Lasso'
+    aliases = ['xml+lasso']
+    alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
+                       '*.incl', '*.inc', '*.las']
+    mimetypes = ['application/xml+lasso']
+
+    def __init__(self, **options):
+        options['requiredelimiters'] = True
+        super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
+
+    def analyse_text(text):
+        rv = LassoLexer.analyse_text(text)
+        if looks_like_xml(text):
+            rv += 0.5
+        return rv
+
+
+class LassoCssLexer(DelegatingLexer):
+    """
+    Subclass of the `LassoLexer` which highlights unhandled data with the
+    `CssLexer`.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'CSS+Lasso'
+    aliases = ['css+lasso']
+    alias_filenames = ['*.css']
+    mimetypes = ['text/css+lasso']
+
+    def __init__(self, **options):
+        options['requiredelimiters'] = True
+        super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
+
+    def analyse_text(text):
+        rv = LassoLexer.analyse_text(text)
+        if re.search(r'\w+:.+;', text):
+            rv += 0.1
+        if 'padding:' in text:
+            rv += 0.1
+        return rv
+
+
+class LassoJavascriptLexer(DelegatingLexer):
+    """
+    Subclass of the `LassoLexer` which highlights unhandled data with the
+    `JavascriptLexer`.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'JavaScript+Lasso'
+    aliases = ['js+lasso', 'javascript+lasso']
+    alias_filenames = ['*.js']
+    mimetypes = ['application/x-javascript+lasso',
+                 'text/x-javascript+lasso',
+                 'text/javascript+lasso']
+
+    def __init__(self, **options):
+        options['requiredelimiters'] = True
+        super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
+                                                   **options)
+
+    def analyse_text(text):
+        rv = LassoLexer.analyse_text(text)
+        if 'function' in text:
+            rv += 0.2
+        return rv
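These delegating lexers force the requiredelimiters option on, so only code between Lasso delimiters is lexed as Lasso and everything else falls through to the HTML, XML, CSS, or JavaScript lexer. A rough usage sketch (illustrative, not part of the diff; the markup is made up):

    # Sketch: select the new delegating lexer by its alias and highlight mixed markup.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    page = '<html><body>[date->format("%Q")]</body></html>'
    print(highlight(page, get_lexer_by_name('html+lasso'), HtmlFormatter()))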
data/vendor/pygments-main/pygments/lexers/text.py
@@ -25,7 +25,7 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
            'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
            'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
            'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
-           'PyPyLogLexer']
+           'PyPyLogLexer', 'RegeditLexer']
 
 
 class IniLexer(RegexLexer):
@@ -41,7 +41,7 @@ class IniLexer(RegexLexer):
     tokens = {
         'root': [
             (r'\s+', Text),
-            (r'[;#].*?$', Comment),
+            (r'[;#].*', Comment.Single),
            (r'\[.*?\]$', Keyword),
            (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
             bygroups(Name.Attribute, Text, Operator, Text, String))
@@ -55,6 +55,49 @@ class IniLexer(RegexLexer):
         return text[0] == '[' and text[npos-1] == ']'
 
 
+class RegeditLexer(RegexLexer):
+    """
+    Lexer for `Windows Registry
+    <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
+    by regedit.
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'reg'
+    aliases = []
+    filenames = ['*.reg']
+    mimetypes = ['text/x-windows-registry']
+
+    tokens = {
+        'root': [
+            (r'Windows Registry Editor.*', Text),
+            (r'\s+', Text),
+            (r'[;#].*', Comment.Single),
+            (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
+             bygroups(Keyword, Operator, Name.Builtin, Keyword)),
+            # String keys, which obey somewhat normal escaping
+            (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
+             bygroups(Name.Attribute, Text, Operator, Text),
+             'value'),
+            # Bare keys (includes @)
+            (r'(.*?)([ \t]*)(=)([ \t]*)',
+             bygroups(Name.Attribute, Text, Operator, Text),
+             'value'),
+        ],
+        'value': [
+            (r'-', Operator, '#pop'),  # delete value
+            (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
+             bygroups(Name.Variable, Punctuation, Number), '#pop'),
+            # As far as I know, .reg files do not support line continuation.
+            (r'.*', String, '#pop'),
+        ]
+    }
+
+    def analyse_text(text):
+        return text.startswith('Windows Registry Editor')
+
+
 class PropertiesLexer(RegexLexer):
     """
     Lexer for configuration files in Java's properties format.
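Since aliases is empty, the new lexer is normally selected by filename or by guess_lexer, whose analyse_text hook keys off the 'Windows Registry Editor' banner. An illustrative sketch (not part of the diff; the .reg content is made up):

    # Sketch: pick the new lexer by file name and dump its tokens.
    from pygments.lexers import get_lexer_for_filename

    reg = ('Windows Registry Editor Version 5.00\n\n'
           '[HKEY_CURRENT_USER\\Software\\Example]\n'
           '"Enabled"=dword:00000001\n')
    lexer = get_lexer_for_filename('export.reg', reg)
    for token, value in lexer.get_tokens(reg):
        print(token, repr(value))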
@@ -187,7 +230,7 @@ class BaseMakefileLexer(RegexLexer):
              bygroups(Keyword, Text), 'export'),
             (r'export\s+', Keyword),
             # assignment
-            (r'([a-zA-Z0-9_${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n|.*\n)+)',
+            (r'([a-zA-Z0-9_${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
              bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
             # strings
             (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
@@ -1643,6 +1686,11 @@ class HttpLexer(RegexLexer):
         yield match.start(5), Literal, match.group(5)
         yield match.start(6), Text, match.group(6)
 
+    def continuous_header_callback(self, match):
+        yield match.start(1), Text, match.group(1)
+        yield match.start(2), Literal, match.group(2)
+        yield match.start(3), Text, match.group(3)
+
     def content_callback(self, match):
         content_type = getattr(self, 'content_type', None)
         content = match.group()
@@ -1673,6 +1721,7 @@ class HttpLexer(RegexLexer):
         ],
         'headers': [
             (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
+            (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
             (r'\r?\n', Text, 'content')
         ],
         'content': [
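The new rule handles folded (continuation) header lines, i.e. header values carried onto a following line that begins with whitespace. A minimal, illustrative sketch (not part of the diff; the request text is made up):

    # Sketch: a folded header line should now be routed through continuous_header_callback.
    from pygments.lexers.text import HttpLexer

    request = ('GET /api/items HTTP/1.1\r\n'
               'Host: example.com\r\n'
               'X-Long-Header: first part,\r\n'
               '    second part on a continuation line\r\n'
               '\r\n')
    for token, value in HttpLexer().get_tokens(request):
        print(token, repr(value))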
data/vendor/pygments-main/pygments/lexers/web.py
@@ -17,16 +17,17 @@ from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
      Number, Other, Punctuation, Literal
 from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
-     html_doctype_matches
+     html_doctype_matches, unirange
 from pygments.lexers.agile import RubyLexer
 from pygments.lexers.compiled import ScalaLexer
 
 
-__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JSONLexer', 'CssLexer',
+__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JsonLexer', 'CssLexer',
            'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
            'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
-           'ObjectiveJLexer', 'CoffeeScriptLexer', 'DuelLexer', 'ScamlLexer',
-           'JadeLexer', 'XQueryLexer', 'DtdLexer', 'DartLexer']
+           'ObjectiveJLexer', 'CoffeeScriptLexer', 'LiveScriptLexer',
+           'DuelLexer', 'ScamlLexer', 'JadeLexer', 'XQueryLexer',
+           'DtdLexer', 'DartLexer', 'LassoLexer']
 
 
 class JavascriptLexer(RegexLexer):
@@ -89,7 +90,7 @@ class JavascriptLexer(RegexLexer):
     }
 
 
-class JSONLexer(RegexLexer):
+class JsonLexer(RegexLexer):
     """
     For JSON data structures.
 
@@ -103,10 +104,10 @@ class JSONLexer(RegexLexer):
 
     # integer part of a number
     int_part = r'-?(0|[1-9]\d*)'
-    
+
     # fractional part of a number
     frac_part = r'\.\d+'
-    
+
     # exponential part of a number
     exp_part = r'[eE](\+|-)?\d+'
 
@@ -169,6 +170,8 @@ class JSONLexer(RegexLexer):
 
     }
 
+JSONLexer = JsonLexer  # for backwards compatibility with Pygments 1.5
+
 
 class ActionScriptLexer(RegexLexer):
     """
@@ -828,7 +831,7 @@ class PhpLexer(RegexLexer):
              r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
              r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
              r'implements|public|private|protected|abstract|clone|try|'
-             r'catch|throw|this|use|namespace)\b', Keyword),
+             r'catch|throw|this|use|namespace|trait)\b', Keyword),
             (r'(true|false|null)\b', Keyword.Constant),
             (r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
             (r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
@@ -998,7 +1001,7 @@ class XmlLexer(RegexLexer):
     Generic lexer for XML (eXtensible Markup Language).
     """
 
-    flags = re.MULTILINE | re.DOTALL
+    flags = re.MULTILINE | re.DOTALL | re.UNICODE
 
     name = 'XML'
     aliases = ['xml']
@@ -1014,8 +1017,8 @@ class XmlLexer(RegexLexer):
             ('<!--', Comment, 'comment'),
             (r'<\?.*?\?>', Comment.Preproc),
             ('<![^>]*>', Comment.Preproc),
-            (r'<\s*[a-zA-Z0-9:._-]+', Name.Tag, 'tag'),
-            (r'<\s*/\s*[a-zA-Z0-9:._-]+\s*>', Name.Tag),
+            (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+            (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
         ],
         'comment': [
             ('[^-]+', Comment),
@@ -1024,7 +1027,7 @@ class XmlLexer(RegexLexer):
         ],
         'tag': [
             (r'\s+', Text),
-            (r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
+            (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
             (r'/?\s*>', Name.Tag, '#pop'),
         ],
         'attr': [
@@ -1049,7 +1052,7 @@ class XsltLexer(XmlLexer):
 
     name = 'XSLT'
     aliases = ['xslt']
-    filenames = ['*.xsl', '*.xslt']
+    filenames = ['*.xsl', '*.xslt', '*.xpl']  # xpl is XProc
     mimetypes = ['application/xsl+xml', 'application/xslt+xml']
 
     EXTRA_KEYWORDS = set([
@@ -1081,6 +1084,8 @@ class MxmlLexer(RegexLexer):
     """
     For MXML markup.
     Nested AS3 in <script> tags is highlighted by the appropriate lexer.
+
+    *New in Pygments 1.1.*
     """
     flags = re.MULTILINE | re.DOTALL
     name = 'MXML'
@@ -1122,6 +1127,8 @@
 class HaxeLexer(RegexLexer):
     """
     For haXe source code (http://haxe.org/).
+
+    *New in Pygments 1.3.*
     """
 
     name = 'haXe'
@@ -1797,14 +1804,14 @@ class CoffeeScriptLexer(RegexLexer):
     tokens = {
         'commentsandwhitespace': [
             (r'\s+', Text),
-            (r'###.*?###', Comment.Multiline),
-            (r'#.*?\n', Comment.Single),
+            (r'###[^#].*?###', Comment.Multiline),
+            (r'#(?!##[^#]).*?\n', Comment.Single),
         ],
         'multilineregex': [
-            include('commentsandwhitespace'),
+            (r'[^/#]+', String.Regex),
             (r'///([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'/', String.Regex),
-            (r'[^/#]+', String.Regex)
+            (r'#{', String.Interpol, 'interpoling_string'),
+            (r'[/#]', String.Regex),
         ],
         'slashstartsregex': [
             include('commentsandwhitespace'),
@@ -1817,28 +1824,32 @@ class CoffeeScriptLexer(RegexLexer):
             # this next expr leads to infinite loops root -> slashstartsregex
             #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
             include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|=|'
-             r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*`%&\|\^/])=?',
+            (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
+             r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+             r'=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
              Operator, 'slashstartsregex'),
-            (r'\([^()]*\)\s*->', Name.Function),
+            (r'(?:\([^()]+\))?\s*[=-]>', Name.Function),
             (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
             (r'[})\].]', Punctuation),
-            (r'(for|in|of|while|break|return|continue|switch|when|then|if|else|'
+            (r'(?<![\.\$])(for|own|in|of|while|until|'
+             r'loop|break|return|continue|'
+             r'switch|when|then|if|unless|else|'
              r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
              r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
-            (r'(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\b',
+            (r'(?<![\.\$])(true|false|yes|no|on|off|null|'
+             r'NaN|Infinity|undefined)\b',
              Keyword.Constant),
             (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
             Name.Builtin),
-            (r'[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable,
+            (r'[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable,
              'slashstartsregex'),
-            (r'@[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable.Instance,
+            (r'@[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable.Instance,
              'slashstartsregex'),
             (r'@', Name.Other, 'slashstartsregex'),
-            (r'@?[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other, 'slashstartsregex'),
+            (r'@?[$a-zA-Z_][a-zA-Z0-9_\$]*', Name.Other, 'slashstartsregex'),
             (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
             (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'[0-9]+', Number.Integer),
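Among other things, the reworked rules pick up bound (fat-arrow) function definitions and the own/until/loop/unless keywords. A rough, illustrative check (not part of the diff; the snippet is made up):

    # Sketch: lex a small CoffeeScript fragment with the updated rules.
    from pygments.lexers.web import CoffeeScriptLexer

    code = 'square = (x) => x * x\nfor own key of obj\n  console.log key unless key is "id"\n'
    for token, value in CoffeeScriptLexer().get_tokens(code):
        print(token, repr(value))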
@@ -1880,6 +1891,118 @@ class CoffeeScriptLexer(RegexLexer):
         ],
     }
 
+
+
+class LiveScriptLexer(RegexLexer):
+    """
+    For `LiveScript`_ source code.
+
+    .. _LiveScript: http://gkz.github.com/LiveScript/
+
+    New in Pygments 1.6.
+    """
+
+    name = 'LiveScript'
+    aliases = ['live-script', 'livescript']
+    filenames = ['*.ls']
+    mimetypes = ['text/livescript']
+
+    flags = re.DOTALL
+    tokens = {
+        'commentsandwhitespace': [
+            (r'\s+', Text),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'#.*?\n', Comment.Single),
+        ],
+        'multilineregex': [
+            include('commentsandwhitespace'),
+            (r'//([gim]+\b|\B)', String.Regex, '#pop'),
+            (r'/', String.Regex),
+            (r'[^/#]+', String.Regex)
+        ],
+        'slashstartsregex': [
+            include('commentsandwhitespace'),
+            (r'//', String.Regex, ('#pop', 'multilineregex')),
+            (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+             r'([gim]+\b|\B)', String.Regex, '#pop'),
+            (r'', Text, '#pop'),
+        ],
+        'root': [
+            # this next expr leads to infinite loops root -> slashstartsregex
+            #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+            include('commentsandwhitespace'),
+            (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
+             r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
+            (r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
+             r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+             r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
+             r'[+*`%&\|\^/])=?',
+             Operator, 'slashstartsregex'),
+            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+            (r'[})\].]', Punctuation),
+            (r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
+             r'return|continue|switch|when|then|if|unless|else|'
+             r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+             r'extends|this|class|by|const|var|to|til)\b', Keyword,
+             'slashstartsregex'),
+            (r'(?<![\.\$])(true|false|yes|no|on|off|'
+             r'null|NaN|Infinity|undefined|void)\b',
+             Keyword.Constant),
+            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+             r'decodeURIComponent|encodeURI|encodeURIComponent|'
+             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+             Name.Builtin),
+            (r'[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable,
+             'slashstartsregex'),
+            (r'@[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
+             'slashstartsregex'),
+            (r'@', Name.Other, 'slashstartsregex'),
+            (r'@?[$a-zA-Z_][a-zA-Z0-9_\-]*', Name.Other, 'slashstartsregex'),
+            (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
+            (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
+            ('"""', String, 'tdqs'),
+            ("'''", String, 'tsqs'),
+            ('"', String, 'dqs'),
+            ("'", String, 'sqs'),
+            (r'\\[\w$-]+', String),
+            (r'<\[.*\]>', String),
+        ],
+        'strings': [
+            (r'[^#\\\'"]+', String),
+            # note that all coffee script strings are multi-line.
+            # hashmarks, quotes and backslashes must be parsed one at a time
+        ],
+        'interpoling_string' : [
+            (r'}', String.Interpol, "#pop"),
+            include('root')
+        ],
+        'dqs': [
+            (r'"', String, '#pop'),
+            (r'\\.|\'', String),  # double-quoted string don't need ' escapes
+            (r'#{', String.Interpol, "interpoling_string"),
+            (r'#', String),
+            include('strings')
+        ],
+        'sqs': [
+            (r"'", String, '#pop'),
+            (r'#|\\.|"', String),  # single quoted strings don't need " escapses
+            include('strings')
+        ],
+        'tdqs': [
+            (r'"""', String, '#pop'),
+            (r'\\.|\'|"', String),  # no need to escape quotes in triple-string
+            (r'#{', String.Interpol, "interpoling_string"),
+            (r'#', String),
+            include('strings'),
+        ],
+        'tsqs': [
+            (r"'''", String, '#pop'),
+            (r'#|\\.|\'|"', String),  # no need to escape quotes in triple-strings
+            include('strings')
+        ],
+    }
+
+
 class DuelLexer(RegexLexer):
     """
     Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
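The new lexer registers the livescript/live-script aliases and the *.ls filename pattern. An illustrative sketch (not part of the diff; the code fragment is made up):

    # Sketch: highlight a LiveScript fragment with the newly added lexer.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    code = 'double = (x) -> x * 2\nsquares = [x * x for x in [1 to 10]]\n'
    print(highlight(code, get_lexer_by_name('livescript'), TerminalFormatter()))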
@@ -2142,8 +2265,8 @@ class XQueryLexer(ExtendedRegexLexer):
     *New in Pygments 1.4.*
     """
     name = 'XQuery'
-    aliases = ['xquery', 'xqy']
-    filenames = ['*.xqy', '*.xquery']
+    aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
+    filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
     mimetypes = ['text/xquery', 'application/xquery']
 
     xquery_parse_state = []
@@ -2544,8 +2667,8 @@ class XQueryLexer(ExtendedRegexLexer):
         'xml_comment': [
             (r'(-->)', popstate_xmlcomment_callback),
             (r'[^-]{1,2}', Literal),
-            (ur'\t|\r|\n|[\u0020-\U0000D7FF]|[\U0000E000-\U0000FFFD]|'
-             ur'[\U00010000-\U0010FFFF]', Literal),
+            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
         ],
         'processing_instruction': [
             (r'\s+', Text, 'processing_instruction_content'),
@@ -2554,13 +2677,13 @@ class XQueryLexer(ExtendedRegexLexer):
         ],
         'processing_instruction_content': [
             (r'\?>', String.Doc, '#pop'),
-            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
-             ur'[\U00010000-\U0010FFFF]', Literal),
+            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
         ],
         'cdata_section': [
             (r']]>', String.Doc, '#pop'),
-            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
-             ur'[\U00010000-\U0010FFFF]', Literal),
+            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
         ],
         'start_tag': [
             include('whitespace'),
@@ -2628,8 +2751,8 @@ class XQueryLexer(ExtendedRegexLexer):
         ],
         'pragmacontents': [
             (r'#\)', Punctuation, 'operator'),
-            (ur'\t|\r|\n|[\u0020-\U0000D7FF]|[\U0000E000-\U0000FFFD]|'
-             ur'[\U00010000-\U0010FFFF]', Literal),
+            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
             (r'(\s+)', Text),
         ],
         'occurrenceindicator': [
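These hunks replace the hard-coded astral-plane character classes with the new pygments.util.unirange helper, which builds an equivalent pattern that also works on narrow (UCS-2) Python builds by expanding to surrogate pairs. A rough illustration (assumes unirange behaves as it is used above):

    # Sketch: unirange returns a regex fragment matching the given codepoint range.
    from pygments.util import unirange

    print(unirange(0x10000, 0x10ffff))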
@@ -2862,3 +2985,226 @@ class DartLexer(RegexLexer):
             (r'\$+', String.Single)
         ]
     }
+
+
+class LassoLexer(RegexLexer):
+    """
+    For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
+    syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
+    HTML, use the `LassoHtmlLexer`.
+
+    Additional options accepted:
+
+    `builtinshighlighting`
+        If given and ``True``, highlight builtin tags, types, traits, and
+        methods (default: ``True``).
+    `requiredelimiters`
+        If given and ``True``, only highlight code between delimiters as Lasso
+        (default: ``False``).
+
+    *New in Pygments 1.6.*
+    """
+
+    name = 'Lasso'
+    aliases = ['lasso', 'lassoscript']
+    filenames = ['*.lasso', '*.lasso[89]']
+    alias_filenames = ['*.incl', '*.inc', '*.las']
+    mimetypes = ['text/x-lasso']
+    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+
+    tokens = {
+        'root': [
+            (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
+            (r'\s+', Other),
+            (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+            (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
+            (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
+            (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
+             ('delimiters', 'anglebrackets')),
+            (r'<', Other, 'delimiters'),
+            include('lasso'),
+        ],
+        'nosquarebrackets': [
+            (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+            (r'<', Other),
+            (r'[^<]+', Other),
+        ],
+        'delimiters': [
+            (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+            (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
+            (r'\[', Comment.Preproc, 'squarebrackets'),
+            (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+            (r'<', Other),
+            (r'[^[<]+', Other),
+        ],
+        'noprocess': [
+            (r'\[/noprocess\]', Comment.Preproc, '#pop'),
+            (r'\[', Other),
+            (r'[^[]', Other),
+        ],
+        'squarebrackets': [
+            (r'\]', Comment.Preproc, '#pop'),
+            include('lasso'),
+        ],
+        'anglebrackets': [
+            (r'\?>', Comment.Preproc, '#pop'),
+            include('lasso'),
+        ],
+        'lasso': [
+            # whitespace/comments
+            (r'\s+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*\*!.*?\*/', String.Doc),
+            (r'/\*.*?\*/', Comment.Multiline),
+
+            # literals
+            (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
+            (r'0x[\da-f]+', Number.Hex),
+            (r'\d+', Number.Integer),
+            (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
+            (r"'", String.Single, 'singlestring'),
+            (r'"', String.Double, 'doublestring'),
+            (r'`[^`]*`', String.Backtick),
+
+            # names
+            (r'\$[a-z_][\w.]*', Name.Variable),
+            (r'#[a-z_][\w.]*|#\d+', Name.Variable.Instance),
+            (r"(\.)('[a-z_][\w.]*')",
+             bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
+            (r"(self)(->)('[a-z_][\w.]*')",
+             bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
+            (r'(\.\.?)([a-z_][\w.]*)',
+             bygroups(Name.Builtin.Pseudo, Name.Other)),
+            (r'(self|inherited|global|void)\b', Name.Builtin.Pseudo),
+            (r'-[a-z_][\w.]*', Name.Attribute),
+            (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
+            (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
+             r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
+             r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
+             r'Error_InvalidDatabase|Error_InvalidPassword|'
+             r'Error_InvalidUsername|Error_ModuleNotFound|'
+             r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
+             r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
+             r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
+             r'Error_UpdateError)\b', Name.Exception),
+
+            # definitions
+            (r'(define)(\s+)([a-z_][\w.]*)(\s*)(=>)(\s*)(type|trait|thread)\b',
+             bygroups(Keyword.Declaration, Text, Name.Class, Text, Operator,
+                      Text, Keyword)),
+            (r'(define)(\s+)([a-z_][\w.]*)(->)([a-z_][\w.]*=?|[-+*/%<>]|==)',
+             bygroups(Keyword.Declaration, Text, Name.Class, Operator,
+                      Name.Function), 'signature'),
+            (r'(define)(\s+)([a-z_][\w.]*)',
+             bygroups(Keyword.Declaration, Text, Name.Function),
+             'signature'),
+            (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*=?|'
+             r'[-+*/%<>]|==)(\s*)(\()',
+             bygroups(Keyword, Text, Name.Function, Text, Punctuation),
+             ('signature', 'parameter')),
+            (r'(public|protected|private)(\s+)([a-z_][\w.]*)',
+             bygroups(Keyword, Text, Name.Function)),
+
+            # keywords
+            (r'(true|false|none|minimal|full|all)\b', Keyword.Constant),
+            (r'(local|var|variable|data)\b', Keyword.Declaration),
+            (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
+             r'null)\b', Keyword.Type),
+            (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
+            (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
+            (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
+             r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
+             r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
+             r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
+             r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
+             r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
+             r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
+             r'Namespace_Using|NoProcess|Output_None|Portal|Private|Protect|'
+             r'Records|Referer|Referrer|Repeating|ResultSet|Rows|Search_Args|'
+             r'Search_Arguments|Select|Sort_Args|Sort_Arguments|Thread_Atomic|'
+             r'Value_List|While|Abort|Case|Else|If_Empty|If_False|If_Null|'
+             r'If_True|Loop_Abort|Loop_Continue|Loop_Count|Params|Params_Up|'
+             r'Return|Return_Value|Run_Children|SOAP_DefineTag|'
+             r'SOAP_LastRequest|SOAP_LastResponse|Tag_Name|ascending|average|'
+             r'by|define|descending|do|equals|frozen|group|handle_failure|'
+             r'import|in|into|join|let|match|max|min|on|order|parent|protected|'
+             r'provide|public|require|skip|split_thread|sum|take|thread|to|'
+             r'trait|type|where|with|yield)\b', bygroups(Punctuation, Keyword)),
+
+            # other
+            (r'(and|or|not)\b', Operator.Word),
+            (r'([a-z_][\w.]*)(\s*)(::\s*)([a-z_][\w.]*)(\s*)(=)',
+             bygroups(Name, Text, Punctuation, Name.Label, Text, Operator)),
+            (r'((?<!->)[a-z_][\w.]*)(\s*)(=(?!=))',
+             bygroups(Name, Text, Operator)),
+            (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
+            (r'(=)(bw|ew|cn|lte?|gte?|n?eq|ft|n?rx)\b',
+             bygroups(Operator, Operator.Word)),
+            (r':=|[-+*/%=<>&|!?\\]+', Operator),
+            (r'[{}():;,@^]', Punctuation),
+        ],
+        'singlestring': [
+            (r"'", String.Single, '#pop'),
+            (r"[^'\\]+", String.Single),
+            include('escape'),
+            (r"\\+", String.Single),
+        ],
+        'doublestring': [
+            (r'"', String.Double, '#pop'),
+            (r'[^"\\]+', String.Double),
+            include('escape'),
+            (r'\\+', String.Double),
+        ],
+        'escape': [
+            (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
+             r'[abefnrtv?\"\'\\]|$)', String.Escape),
+        ],
+        'signature': [
+            (r'[(,]', Punctuation, 'parameter'),
+            (r'=>', Operator, '#pop'),
+            include('lasso'),
+        ],
+        'parameter': [
+            (r'\.\.\.', Name.Builtin.Pseudo),
+            (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
+            (r'\)', Punctuation, '#pop'),
+            include('lasso'),
+        ],
+    }
+
+    def __init__(self, **options):
+        self.builtinshighlighting = get_bool_opt(
+            options, 'builtinshighlighting', True)
+        self.requiredelimiters = get_bool_opt(
+            options, 'requiredelimiters', False)
+
+        self._builtins = set()
+        if self.builtinshighlighting:
+            from pygments.lexers._lassobuiltins import BUILTINS
+            for key, value in BUILTINS.iteritems():
+                self._builtins.update(value)
+        RegexLexer.__init__(self, **options)
+
+    def get_tokens_unprocessed(self, text):
+        stack = ['root']
+        if self.requiredelimiters:
+            stack.append('delimiters')
+        for index, token, value in \
+            RegexLexer.get_tokens_unprocessed(self, text, stack):
+            if token is Name.Other:
+                if value.lower() in self._builtins:
+                    yield index, Name.Builtin, value
+                    continue
+            yield index, token, value
+
+    def analyse_text(text):
+        rv = 0.0
+        if 'bin/lasso9' in text:
+            rv += 0.8
+        if re.search(r'<\?(=|lasso)', text, re.I):
+            rv += 0.4
+        if re.search(r'local\(', text, re.I):
+            rv += 0.4
+        if re.search(r'\[\n|\?>', text):
+            rv += 0.4
+        return rv
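The options documented in the docstring are plain constructor arguments; a usage sketch (illustrative, not part of the diff; the Lasso snippet is made up):

    # Sketch: instantiate the new lexer with its documented options.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.web import LassoLexer

    code = "<?lasso\nlocal(name) = 'world'\n'Hello, ' + #name\n?>"
    lexer = LassoLexer(builtinshighlighting=True, requiredelimiters=True)
    print(highlight(code, lexer, HtmlFormatter()))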