pygments.rb 0.5.2 → 0.5.4

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (58)
  1. data/README.md +2 -0
  2. data/lexers +0 -0
  3. data/lib/pygments/version.rb +1 -1
  4. data/test/test_pygments.rb +1 -1
  5. data/vendor/custom_lexers/github.py +15 -9
  6. data/vendor/pygments-main/AUTHORS +12 -2
  7. data/vendor/pygments-main/CHANGES +52 -2
  8. data/vendor/pygments-main/REVISION +1 -1
  9. data/vendor/pygments-main/docs/src/lexerdevelopment.txt +52 -0
  10. data/vendor/pygments-main/external/lasso-builtins-generator-9.lasso +67 -44
  11. data/vendor/pygments-main/pygmentize +1 -1
  12. data/vendor/pygments-main/pygments/filters/__init__.py +2 -2
  13. data/vendor/pygments-main/pygments/formatter.py +3 -0
  14. data/vendor/pygments-main/pygments/lexers/__init__.py +11 -0
  15. data/vendor/pygments-main/pygments/lexers/_lassobuiltins.py +2880 -3124
  16. data/vendor/pygments-main/pygments/lexers/_mapping.py +30 -20
  17. data/vendor/pygments-main/pygments/lexers/_robotframeworklexer.py +1 -1
  18. data/vendor/pygments-main/pygments/lexers/_stan_builtins.py +206 -20
  19. data/vendor/pygments-main/pygments/lexers/agile.py +378 -5
  20. data/vendor/pygments-main/pygments/lexers/asm.py +2 -2
  21. data/vendor/pygments-main/pygments/lexers/compiled.py +235 -8
  22. data/vendor/pygments-main/pygments/lexers/dotnet.py +88 -47
  23. data/vendor/pygments-main/pygments/lexers/functional.py +195 -62
  24. data/vendor/pygments-main/pygments/lexers/github.py +15 -9
  25. data/vendor/pygments-main/pygments/lexers/jvm.py +14 -11
  26. data/vendor/pygments-main/pygments/lexers/math.py +284 -18
  27. data/vendor/pygments-main/pygments/lexers/other.py +132 -21
  28. data/vendor/pygments-main/pygments/lexers/shell.py +29 -15
  29. data/vendor/pygments-main/pygments/lexers/sql.py +1 -1
  30. data/vendor/pygments-main/pygments/lexers/templates.py +8 -8
  31. data/vendor/pygments-main/pygments/lexers/text.py +59 -9
  32. data/vendor/pygments-main/pygments/lexers/web.py +832 -210
  33. data/vendor/pygments-main/pygments/modeline.py +40 -0
  34. data/vendor/pygments-main/tests/examplefiles/Deflate.fs +578 -0
  35. data/vendor/pygments-main/tests/examplefiles/Get-CommandDefinitionHtml.ps1 +66 -0
  36. data/vendor/pygments-main/tests/examplefiles/IPDispatchC.nc +104 -0
  37. data/vendor/pygments-main/tests/examplefiles/IPDispatchP.nc +671 -0
  38. data/vendor/pygments-main/tests/examplefiles/RoleQ.pm6 +23 -0
  39. data/vendor/pygments-main/tests/examplefiles/example.ceylon +29 -10
  40. data/vendor/pygments-main/tests/examplefiles/example.clay +33 -0
  41. data/vendor/pygments-main/tests/examplefiles/example.hx +142 -0
  42. data/vendor/pygments-main/tests/examplefiles/example.lagda +19 -0
  43. data/vendor/pygments-main/tests/examplefiles/example.rexx +50 -0
  44. data/vendor/pygments-main/tests/examplefiles/example.stan +86 -75
  45. data/vendor/pygments-main/tests/examplefiles/garcia-wachs.kk +40 -30
  46. data/vendor/pygments-main/tests/examplefiles/grammar-test.p6 +22 -0
  47. data/vendor/pygments-main/tests/examplefiles/objc_example.m +7 -0
  48. data/vendor/pygments-main/tests/examplefiles/py3tb_test.py3tb +4 -0
  49. data/vendor/pygments-main/tests/examplefiles/swig_java.swg +1329 -0
  50. data/vendor/pygments-main/tests/examplefiles/swig_std_vector.i +225 -0
  51. data/vendor/pygments-main/tests/examplefiles/test.agda +102 -0
  52. data/vendor/pygments-main/tests/examplefiles/test.bb +95 -0
  53. data/vendor/pygments-main/tests/examplefiles/test.ebnf +31 -0
  54. data/vendor/pygments-main/tests/examplefiles/test.p6 +252 -0
  55. data/vendor/pygments-main/tests/examplefiles/type.lisp +16 -0
  56. data/vendor/pygments-main/tests/test_basic_api.py +3 -3
  57. data/vendor/pygments-main/tests/test_lexers_other.py +68 -0
  58. metadata +21 -2
@@ -14,7 +14,8 @@ import re
 from pygments.lexer import RegexLexer, include, bygroups, using, \
      this, combined, ExtendedRegexLexer
 from pygments.token import Error, Punctuation, Literal, Token, \
-     Text, Comment, Operator, Keyword, Name, String, Number, Generic
+     Text, Comment, Operator, Keyword, Name, String, Number, Generic, \
+     Whitespace
 from pygments.util import get_bool_opt
 from pygments.lexers.web import HtmlLexer

@@ -35,7 +36,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
            'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer',
            'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
            'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
-           'CbmBasicV2Lexer', 'AutoItLexer']
+           'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer']


 class ECLLexer(RegexLexer):
@@ -245,7 +246,7 @@ class MOOCodeLexer(RegexLexer):
     """
     name = 'MOOCode'
     filenames = ['*.moo']
-    aliases = ['moocode']
+    aliases = ['moocode', 'moo']
     mimetypes = ['text/x-moocode']

     tokens = {
@@ -289,7 +290,7 @@ class SmalltalkLexer(RegexLexer):
     """
     name = 'Smalltalk'
     filenames = ['*.st']
-    aliases = ['smalltalk', 'squeak']
+    aliases = ['smalltalk', 'squeak', 'st']
     mimetypes = ['text/x-smalltalk']

     tokens = {
@@ -363,7 +364,7 @@ class SmalltalkLexer(RegexLexer):
             include('literals'),
         ],
         'afterobject' : [
-            (r'! !$', Keyword , '#pop'), # squeak chunk delimeter
+            (r'! !$', Keyword , '#pop'), # squeak chunk delimiter
             include('whitespaces'),
             (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
              Name.Builtin, '#pop'),
@@ -1397,8 +1398,6 @@ class RebolLexer(RegexLexer):
             (r';.*\n', Comment),
             (r'%"', Name.Decorator, 'stringFile'),
             (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
-            (r'<[a-zA-Z0-9:._-]*>', Name.Tag),
-            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
             (r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
             (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
             (r'\d+\-[0-9a-zA-Z]+\-\d+(\/\d+\:\d+(\:\d+)?'
@@ -1415,6 +1414,8 @@ class RebolLexer(RegexLexer):
             (r'comment\s', Comment, 'comment'),
             (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
             (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+            (r'<[a-zA-Z0-9:._-]*>', Name.Tag),
+            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
             (r'([^(\^{^")\s]+)', Text),
         ],
         'string': [
@@ -1961,11 +1962,11 @@ class AsymptoteLexer(RegexLexer):
         from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
         for index, token, value in \
                RegexLexer.get_tokens_unprocessed(self, text):
-           if token is Name and value in ASYFUNCNAME:
-               token = Name.Function
-           elif token is Name and value in ASYVARNAME:
-               token = Name.Variable
-           yield index, token, value
+            if token is Name and value in ASYFUNCNAME:
+                token = Name.Function
+            elif token is Name and value in ASYVARNAME:
+                token = Name.Variable
+            yield index, token, value


 class PostScriptLexer(RegexLexer):
@@ -1979,7 +1980,7 @@ class PostScriptLexer(RegexLexer):
     *New in Pygments 1.4.*
     """
     name = 'PostScript'
-    aliases = ['postscript']
+    aliases = ['postscript', 'postscr']
     filenames = ['*.ps', '*.eps']
     mimetypes = ['application/postscript']

@@ -2067,7 +2068,7 @@ class AutohotkeyLexer(RegexLexer):
     *New in Pygments 1.4.*
     """
     name = 'autohotkey'
-    aliases = ['ahk']
+    aliases = ['ahk', 'autohotkey']
     filenames = ['*.ahk', '*.ahkl']
     mimetypes = ['text/x-autohotkey']

@@ -2352,7 +2353,7 @@ class ProtoBufLexer(RegexLexer):
     """

     name = 'Protocol Buffer'
-    aliases = ['protobuf']
+    aliases = ['protobuf', 'proto']
     filenames = ['*.proto']

     tokens = {
@@ -2839,8 +2840,8 @@ class BroLexer(RegexLexer):
             (r'\\\n', Text),
             # Keywords
             (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
-             r'|export|for|function|if|global|local|module|next'
-             r'|of|print|redef|return|schedule|type|when|while)\b', Keyword),
+             r'|export|for|function|if|global|hook|local|module|next'
+             r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword),
             (r'(addr|any|bool|count|counter|double|file|int|interval|net'
              r'|pattern|port|record|set|string|subnet|table|time|timer'
              r'|vector)\b', Keyword.Type),
@@ -3306,7 +3307,7 @@ class NSISLexer(RegexLexer):
     tokens = {
         'root': [
             (r'[;\#].*\n', Comment),
-            (r"'.*'", String.Single),
+            (r"'.*?'", String.Single),
             (r'"', String.Double, 'str_double'),
             (r'`', String.Backtick, 'str_backtick'),
             include('macro'),
@@ -3457,7 +3458,7 @@ class RPMSpecLexer(RegexLexer):
             include('macro'),
             (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
              r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
-             r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Provides|Conflicts|'
+             r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
              r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
              bygroups(Generic.Heading, Punctuation, using(this))),
             (r'^%description', Name.Decorator, 'description'),
@@ -3467,7 +3468,7 @@ class RPMSpecLexer(RegexLexer):
              r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
              Keyword),
             include('interpol'),
-            (r"'.*'", String.Single),
+            (r"'.*?'", String.Single),
             (r'"', String.Double, 'string'),
             (r'.', Text),
         ],
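
The quoted-string rules in NSISLexer and RPMSpecLexer above switch from a greedy to a non-greedy quantifier so that two quoted strings on one line are no longer merged into a single token. A quick sketch of the difference using plain Python re (the sample line is made up):

import re

line = "Name 'first' and 'second'"
print(re.findall(r"'.*'", line))    # greedy: ["'first' and 'second'"]
print(re.findall(r"'.*?'", line))   # non-greedy: ["'first'", "'second'"]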
@@ -3624,7 +3625,7 @@ class AutoItLexer(RegexLexer):
             (r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name),
             (r'\\|\'', Text),
             (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
-            (r'_\n', Text), # Line continuation
+            (r'_\n', Text),  # Line continuation
             include('garbage'),
         ],
         'commands': [
@@ -3665,3 +3666,113 @@
             (r'[^\S\n]', Text),
         ],
     }
+
+
+class RexxLexer(RegexLexer):
+    """
+    `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
+    a wide range of different platforms with its roots found on mainframe
+    systems. It is popular for I/O- and data based tasks and can act as glue
+    language to bind different applications together.
+
+    *New in Pygments 1.7.*
+    """
+    name = 'Rexx'
+    aliases = ['rexx', 'ARexx', 'arexx']
+    filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
+    mimetypes = ['text/x-rexx']
+    flags = re.IGNORECASE
+
+    tokens = {
+        'root': [
+            (r'\s', Whitespace),
+            (r'/\*', Comment.Multiline, 'comment'),
+            (r'"', String, 'string_double'),
+            (r"'", String, 'string_single'),
+            (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
+            (r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b',
+             bygroups(Name.Function, Whitespace, Operator, Whitespace,
+                      Keyword.Declaration)),
+            (r'([a-z_][a-z0-9_]*)(\s*)(:)',
+             bygroups(Name.Label, Whitespace, Operator)),
+            include('function'),
+            include('keyword'),
+            include('operator'),
+            (r'[a-z_][a-z0-9_]*', Text),
+        ],
+        'function': [
+            (r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|'
+             r'center|charin|charout|chars|compare|condition|copies|d2c|'
+             r'd2x|datatype|date|delstr|delword|digits|errortext|form|'
+             r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|'
+             r'max|min|overlay|pos|queued|random|reverse|right|sign|'
+             r'sourceline|space|stream|strip|substr|subword|symbol|time|'
+             r'trace|translate|trunc|value|verify|word|wordindex|'
+             r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)(\()',
+             bygroups(Name.Builtin, Whitespace, Operator)),
+        ],
+        'keyword': [
+            (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
+             r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
+             r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
+             r'while)\b', Keyword.Reserved),
+        ],
+        'operator': [
+            (ur'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
+             ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
+             ur'¬>>|¬>|¬|\.|,)', Operator),
+        ],
+        'string_double': [
+            (r'[^"\n]+', String),
+            (r'""', String),
+            (r'"', String, '#pop'),
+            (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+        ],
+        'string_single': [
+            (r'[^\'\n]', String),
+            (r'\'\'', String),
+            (r'\'', String, '#pop'),
+            (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+        ],
+        'comment': [
+            (r'[^*]+', Comment.Multiline),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'\*', Comment.Multiline),
+        ]
+    }
+
+    _c = lambda s: re.compile(s, re.MULTILINE)
+    _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
+    _ADDRESS_PATTERN = _c(r'^\s*address\s+')
+    _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
+    _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
+    _PROCEDURE_PATTERN = _c(r'^\s*([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b')
+    _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
+    _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
+    PATTERNS_AND_WEIGHTS = (
+        (_ADDRESS_COMMAND_PATTERN, 0.2),
+        (_ADDRESS_PATTERN, 0.05),
+        (_DO_WHILE_PATTERN, 0.1),
+        (_ELSE_DO_PATTERN, 0.1),
+        (_IF_THEN_DO_PATTERN, 0.1),
+        (_PROCEDURE_PATTERN, 0.5),
+        (_PARSE_ARG_PATTERN, 0.2),
+    )
+
+    def analyse_text(text):
+        """
+        Check for inital comment and patterns that distinguish Rexx from other
+        C-like languages.
+        """
+        if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
+            # Header matches MVS Rexx requirements, this is certainly a Rexx
+            # script.
+            return 1.0
+        elif text.startswith('/*'):
+            # Header matches general Rexx requirements; the source code might
+            # still be any language using C comments such as C++, C# or Java.
+            lowerText = text.lower()
+            result = sum(weight
+                         for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
+                         if pattern.search(lowerText)) + 0.01
+            return min(result, 1.0)
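
The weighted patterns above drive RexxLexer.analyse_text, which Pygments consults when guessing a lexer. A minimal sketch of the scoring, assuming this vendored Pygments (1.7-dev) is importable; the Rexx snippets are made up for illustration:

from pygments.lexers import RexxLexer

# A leading comment that names 'rexx' is treated as a certain match.
print(RexxLexer.analyse_text('/* rexx */\nsay "hello"'))  # 1.0

# A generic C-style header plus a 'parse arg' line only adds weight:
# 0.01 base + 0.2 for _PARSE_ARG_PATTERN, i.e. roughly 0.21.
print(RexxLexer.analyse_text('/* count words */\nparse arg line\nsay words(line)\n'))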
@@ -67,9 +67,11 @@ class BashLexer(RegexLexer):
         'data': [
             (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
             (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
-            (r';', Text),
+            (r';', Punctuation),
+            (r'&', Punctuation),
+            (r'\|', Punctuation),
             (r'\s+', Text),
-            (r'[^=\s\[\]{}()$"\'`\\<]+', Text),
+            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
             (r'\d+(?= |\Z)', Number),
             (r'\$#?(\w+|.)', Name.Variable),
             (r'<', Text),
@@ -99,7 +101,10 @@ class BashLexer(RegexLexer):
     }

     def analyse_text(text):
-        return shebang_matches(text, r'(ba|z|)sh')
+        if shebang_matches(text, r'(ba|z|)sh'):
+            return 1
+        if text.startswith('$ '):
+            return 0.2


 class BashSessionLexer(Lexer):
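
For reference, a sketch of what the reworked BashLexer.analyse_text reports for the two branches added above (sample inputs are mine; Pygments' wrapper clamps results to the 0.0-1.0 range):

from pygments.lexers import BashLexer

print(BashLexer.analyse_text('#!/bin/bash\necho hi\n'))  # shebang -> 1.0
print(BashLexer.analyse_text('$ ls -la\n'))              # shell-prompt transcript -> 0.2
print(BashLexer.analyse_text('no shell hints here'))     # neither branch -> 0.0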
@@ -206,7 +211,7 @@ class BatchLexer(RegexLexer):
     *New in Pygments 0.7.*
     """
     name = 'Batchfile'
-    aliases = ['bat']
+    aliases = ['bat', 'dosbatch', 'winbatch']
     filenames = ['*.bat', '*.cmd']
     mimetypes = ['application/x-dos-batch']

@@ -329,8 +334,8 @@ class PowerShellLexer(RegexLexer):
     *New in Pygments 1.5.*
     """
     name = 'PowerShell'
-    aliases = ['powershell', 'posh', 'ps1']
-    filenames = ['*.ps1']
+    aliases = ['powershell', 'posh', 'ps1', 'psm1']
+    filenames = ['*.ps1','*.psm1']
     mimetypes = ['text/x-powershell']

     flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
@@ -342,7 +347,7 @@ class PowerShellLexer(RegexLexer):
         'dynamicparam do default continue cmdletbinding break begin alias \\? '
         '% #script #private #local #global mandatory parametersetname position '
         'valuefrompipeline valuefrompipelinebypropertyname '
-        'valuefromremainingarguments helpmessage try catch').split()
+        'valuefromremainingarguments helpmessage try catch throw').split()

     operators = (
         'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
@@ -368,12 +373,15 @@ class PowerShellLexer(RegexLexer):

     tokens = {
         'root': [
+            # we need to count pairs of parentheses for correct highlight
+            # of '$(...)' blocks in strings
+            (r'\(', Punctuation, 'child'),
             (r'\s+', Text),
             (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
              bygroups(Comment, String.Doc, Comment)),
             (r'#[^\n]*?$', Comment),
             (r'(&lt;|<)#', Comment.Multiline, 'multline'),
-            (r'@"\n.*?\n"@', String.Heredoc),
+            (r'@"\n', String.Heredoc, 'heredoc-double'),
             (r"@'\n.*?\n'@", String.Heredoc),
             # escaped syntax
             (r'`[\'"$@-]', Punctuation),
@@ -387,7 +395,11 @@ class PowerShellLexer(RegexLexer):
             (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant), # .net [type]s
             (r'-[a-z_][a-z0-9_]*', Name),
             (r'\w+', Name),
-            (r'[.,{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
+            (r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
+        ],
+        'child': [
+            (r'\)', Punctuation, '#pop'),
+            include('root'),
         ],
         'multline': [
             (r'[^#&.]+', Comment.Multiline),
@@ -396,15 +408,17 @@ class PowerShellLexer(RegexLexer):
             (r'[#&.]', Comment.Multiline),
         ],
         'string': [
+            (r"`[0abfnrtv'\"\$]", String.Escape),
             (r'[^$`"]+', String.Double),
-            (r'\$\(', String.Interpol, 'interpol'),
-            (r'`"|""', String.Double),
+            (r'\$\(', Punctuation, 'child'),
+            (r'""', String.Double),
             (r'[`$]', String.Double),
             (r'"', String.Double, '#pop'),
         ],
-        'interpol': [
-            (r'[^$)]+', String.Interpol),
-            (r'\$\(', String.Interpol, '#push'),
-            (r'\)', String.Interpol, '#pop'),
+        'heredoc-double': [
+            (r'\n"@', String.Heredoc, '#pop'),
+            (r'\$\(', Punctuation, 'child'),
+            (r'[^@\n]+"]', String.Heredoc),
+            (r".", String.Heredoc),
         ]
     }
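
The new 'child' state makes the lexer track balanced parentheses, so a $( ... ) subexpression that itself contains parentheses is no longer cut short at the first ')'. A rough sketch of how to inspect this, assuming the vendored Pygments is importable (the one-liner is a made-up example; exact token types depend on the rules above):

from pygments.lexers import PowerShellLexer

code = '"total: $( (1 + 2) * 3 )"\n'
for token, value in PowerShellLexer().get_tokens(code):
    print(token, repr(value))
# With the old 'interpol' state the inner '(1 + 2)' closed the block early;
# now the whole subexpression is lexed as code via the 'child' state.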
@@ -375,7 +375,7 @@ class SqlLexer(RegexLexer):
             r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
             r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
             r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
-            r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
+            r'EXCEPTION|EXCEPT|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
             r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
             r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
             r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
@@ -766,7 +766,7 @@ class CheetahHtmlLexer(DelegatingLexer):
     """

     name = 'HTML+Cheetah'
-    aliases = ['html+cheetah', 'html+spitfire']
+    aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
     mimetypes = ['text/html+cheetah', 'text/html+spitfire']

     def __init__(self, **options):
@@ -1258,7 +1258,7 @@ class HtmlDjangoLexer(DelegatingLexer):
     """

     name = 'HTML+Django/Jinja'
-    aliases = ['html+django', 'html+jinja']
+    aliases = ['html+django', 'html+jinja', 'htmldjango']
     alias_filenames = ['*.html', '*.htm', '*.xhtml']
     mimetypes = ['text/html+django', 'text/html+jinja']

@@ -1657,7 +1657,7 @@ class LassoHtmlLexer(DelegatingLexer):
         super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)

     def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
+        rv = LassoLexer.analyse_text(text) - 0.01
         if re.search(r'<\w+>', text, re.I):
             rv += 0.2
         if html_doctype_matches(text):
@@ -1683,9 +1683,9 @@ class LassoXmlLexer(DelegatingLexer):
         super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)

     def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
+        rv = LassoLexer.analyse_text(text) - 0.01
         if looks_like_xml(text):
-            rv += 0.5
+            rv += 0.4
         return rv


@@ -1707,8 +1707,8 @@ class LassoCssLexer(DelegatingLexer):
         super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)

     def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
-        if re.search(r'\w+:.+;', text):
+        rv = LassoLexer.analyse_text(text) - 0.05
+        if re.search(r'\w+:.+?;', text):
             rv += 0.1
         if 'padding:' in text:
             rv += 0.1
@@ -1736,7 +1736,7 @@ class LassoJavascriptLexer(DelegatingLexer):
                                                      **options)

     def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
+        rv = LassoLexer.analyse_text(text) - 0.05
         if 'function' in text:
             rv += 0.2
         return rv
@@ -25,7 +25,7 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
            'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
            'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
            'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
-           'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer']
+           'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer']


 class IniLexer(RegexLexer):
@@ -34,7 +34,7 @@ class IniLexer(RegexLexer):
     """

     name = 'INI'
-    aliases = ['ini', 'cfg']
+    aliases = ['ini', 'cfg', 'dosini']
     filenames = ['*.ini', '*.cfg']
     mimetypes = ['text/x-ini']

@@ -106,7 +106,7 @@ class PropertiesLexer(RegexLexer):
     """

     name = 'Properties'
-    aliases = ['properties']
+    aliases = ['properties', 'jproperties']
     filenames = ['*.properties']
     mimetypes = ['text/x-java-properties']

@@ -128,7 +128,7 @@ class SourcesListLexer(RegexLexer):
     """

     name = 'Debian Sourcelist'
-    aliases = ['sourceslist', 'sources.list']
+    aliases = ['sourceslist', 'sources.list', 'debsources']
     filenames = ['sources.list']
     mimetype = ['application/x-debian-sourceslist']

@@ -1053,7 +1053,7 @@ class DebianControlLexer(RegexLexer):
     *New in Pygments 0.9.*
     """
     name = 'Debian Control file'
-    aliases = ['control']
+    aliases = ['control', 'debcontrol']
     filenames = ['control']

     tokens = {
@@ -1631,7 +1631,7 @@ class CMakeLexer(RegexLexer):
             # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
             # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
             # r'COUNTARGS)\b', Name.Builtin, 'args'),
-            (r'\b([A-Za-z_]+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
+            (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
                                               Punctuation), 'args'),
             include('keywords'),
             include('ws')
@@ -1709,12 +1709,12 @@ class HttpLexer(RegexLexer):

     tokens = {
         'root': [
-            (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)'
-             r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
+            (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
+             r'(HTTP)(/)(1\.[01])(\r?\n|$)',
              bygroups(Name.Function, Text, Name.Namespace, Text,
                       Keyword.Reserved, Operator, Number, Text),
              'headers'),
-            (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
+            (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
              bygroups(Keyword.Reserved, Operator, Number, Text, Number,
                       Text, Name.Exception, Text),
              'headers'),
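
With PATCH added to the request-line alternation, a request like the following made-up example should now be recognized by HttpLexer instead of degrading to error tokens on the first line; a small usage sketch:

from pygments import highlight
from pygments.lexers import HttpLexer
from pygments.formatters import TerminalFormatter

request = (
    'PATCH /api/users/42 HTTP/1.1\r\n'
    'Host: example.com\r\n'
    'Content-Type: application/json\r\n'
    '\r\n'
    '{"name": "new name"}\r\n'
)
print(highlight(request, HttpLexer(), TerminalFormatter()))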
@@ -1841,3 +1841,53 @@ class HxmlLexer(RegexLexer):
             (r'#.*', Comment.Single)
         ]
     }
+
+
+class EbnfLexer(RegexLexer):
+    """
+    Lexer for `ISO/IEC 14977 EBNF
+    <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
+    grammars.
+
+    *New in Pygments 1.7.*
+    """
+
+    name = 'EBNF'
+    aliases = ['ebnf']
+    filenames = ['*.ebnf']
+    mimetypes = ['text/x-ebnf']
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            include('comment_start'),
+            include('identifier'),
+            (r'=', Operator, 'production'),
+        ],
+        'production': [
+            include('whitespace'),
+            include('comment_start'),
+            include('identifier'),
+            (r'"[^"]*"', String.Double),
+            (r"'[^']*'", String.Single),
+            (r'(\?[^?]*\?)', Name.Entity),
+            (r'[\[\]{}(),|]', Punctuation),
+            (r'-', Operator),
+            (r';', Punctuation, '#pop'),
+        ],
+        'whitespace': [
+            (r'\s+', Text),
+        ],
+        'comment_start': [
+            (r'\(\*', Comment.Multiline, 'comment'),
+        ],
+        'comment': [
+            (r'[^*)]', Comment.Multiline),
+            include('comment_start'),
+            (r'\*\)', Comment.Multiline, '#pop'),
+            (r'[*)]', Comment.Multiline),
+        ],
+        'identifier': [
+            (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword),
+        ],
+    }
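
The new EbnfLexer is registered under the 'ebnf' alias (see the _mapping.py entry in the file list above), so it can be used like any other lexer; a small sketch with a made-up grammar fragment:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter

grammar = '''(* a tiny ISO/IEC 14977 style grammar *)
digit = "0" | "1" | "2" | "3" ;
number = digit, { digit } ;
'''
print(highlight(grammar, get_lexer_by_name('ebnf'), HtmlFormatter()))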