pygments.rb 0.3.6 → 0.3.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/Rakefile CHANGED
@@ -9,9 +9,7 @@ task :default => :test
 
 GEMSPEC = eval(File.read('pygments.rb.gemspec'))
 
-require 'rake/gempackagetask'
-Rake::GemPackageTask.new(GEMSPEC) do |pkg|
-end
+require 'rubygems/package_task'
 
 # ==========================================================
 # Testing
data/lexers CHANGED
Binary file
@@ -129,6 +129,9 @@ module Pygments
         :filenames => lxr[2],
         :mimetypes => lxr[3]
       }
+      hash["Augeas"] = {:name=>"Augeas", :aliases=>["augeas"], :filenames=>["*.aug"], :mimetypes=>[]}
+      hash["dasm16"] = {:name=>"dasm16", :aliases=>["DASM16"], :filenames=>["*.dasm16", "*.dasm"], :mimetypes=>['text/x-dasm16']}
+      hash["Puppet"] = {:name=>"Puppet", :aliases=>["puppet"], :filenames=>["*.pp"], :mimetypes=>[]}
       hash
     end
   end
@@ -1,3 +1,3 @@
 module Pygments
-  VERSION = '0.3.6'
+  VERSION = '0.3.7'
 end
@@ -163,6 +163,10 @@ class Lexer(object):
                     text = decoded
             else:
                 text = text.decode(self.encoding)
+        else:
+            if text.startswith(u'\ufeff'):
+                text = text[len(u'\ufeff'):]
+
         # text now *is* a unicode string
         text = text.replace('\r\n', '\n')
         text = text.replace('\r', '\n')
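
The branch added above strips a leading UTF-8 byte-order mark from input that is already a unicode string before newlines are normalised. A minimal standalone sketch of that behaviour (illustration only, not part of the diff):

    # Mirrors the BOM handling added to the lexer: a leading U+FEFF on
    # already-decoded text is dropped before tokenizing.
    BOM = u'\ufeff'

    def strip_bom(text):
        if text.startswith(BOM):
            text = text[len(BOM):]
        return text

    assert strip_bom(u'\ufeffprint(1)\n') == u'print(1)\n'
    assert strip_bom(u'print(1)\n') == u'print(1)\n'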
@@ -31,8 +31,8 @@ LEXERS = {
     'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
     'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
     'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
-    'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk',), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
     'AugeasLexer': ('pygments.lexers.github', 'Augeas', ('augeas',), ('*.aug',), ()),
+    'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk',), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
     'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
     'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
     'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
@@ -74,10 +74,10 @@ LEXERS = {
     'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
     'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
     'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
-    'Dasm16Lexer': ('pygments.lexers.github', 'dasm16', ('DASM16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
     'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
     'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
     'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
+    'Dasm16Lexer': ('pygments.lexers.github', 'dasm16', ('DASM16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
     'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control',), ('control',), ()),
     'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
     'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
@@ -124,6 +124,7 @@ LEXERS = {
     'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
     'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
     'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()),
+    'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
     'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
     'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg'), ('text/x-ini',)),
     'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
@@ -150,8 +150,8 @@ class RowSplitter(object):
     _pipe_splitter = re.compile('((?:^| +)\|(?: +|$))')
 
     def split(self, row):
-        splitter = self._split_from_spaces \
-            if not row.startswith('| ') else self._split_from_pipes
+        splitter = (row.startswith('| ') and self._split_from_pipes
+                    or self._split_from_spaces)
         for value in splitter(row.rstrip()):
             yield value
         yield '\n'
@@ -300,7 +300,7 @@ class ForLoop(Tokenizer):
         self._in_arguments = False
 
     def _tokenize(self, value, index):
-        token = ARGUMENT if self._in_arguments else SYNTAX
+        token = self._in_arguments and ARGUMENT or SYNTAX
         if value.upper() in ('IN', 'IN RANGE'):
             self._in_arguments = True
         return token
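
Both rewrites above replace a conditional expression with the older "and ... or ..." idiom, which yields the same result whenever the middle operand is truthy (the values selected here always are); the likely motivation is compatibility with Python versions older than 2.5, though the diff itself does not say so. A tiny illustration of the equivalence:

    # Hypothetical stand-ins for the ARGUMENT/SYNTAX tokens used above.
    ARGUMENT, SYNTAX = 'argument', 'syntax'

    def pick(in_arguments):
        # Equivalent to: ARGUMENT if in_arguments else SYNTAX,
        # because ARGUMENT is truthy.
        return in_arguments and ARGUMENT or SYNTAX

    assert pick(True) == ARGUMENT
    assert pick(False) == SYNTAX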
@@ -240,8 +240,8 @@ class LlvmLexer(RegexLexer):
              r'|linkonce_odr|weak|weak_odr|appending|dllimport|dllexport'
              r'|common|default|hidden|protected|extern_weak|external'
              r'|thread_local|zeroinitializer|undef|null|to|tail|target|triple'
-             r'|datalayout|volatile|nuw|nsw|exact|inbounds|align'
-             r'|addrspace|section|alias|module|asm|sideeffect|gc|dbg'
+             r'|datalayout|volatile|nuw|nsw|nnan|ninf|nsz|arcp|fast|exact|inbounds'
+             r'|align|addrspace|section|alias|module|asm|sideeffect|gc|dbg'
 
              r'|ccc|fastcc|coldcc|x86_stdcallcc|x86_fastcallcc|arm_apcscc'
              r'|arm_aapcscc|arm_aapcs_vfpcc'
@@ -675,7 +675,7 @@ class ClojureLexer(RegexLexer):
             (r'::?' + valid_name, String.Symbol),
 
             # special operators
-            (r'~@|[`\'#^~&]', Operator),
+            (r'~@|[`\'#^~&@]', Operator),
 
             # highlight the special forms
             (_multi_escape(special_forms), Keyword),
@@ -11,6 +11,7 @@
 
 import re
 
+from pygments.util import shebang_matches
 from pygments.lexer import Lexer, RegexLexer, bygroups, include, \
     combined, do_insertions
 from pygments.token import Comment, String, Punctuation, Keyword, Name, \
@@ -342,6 +343,10 @@ class MatlabLexer(RegexLexer):
             # (not great, but handles common cases...)
             (r'(?<=[\w\)\]])\'', Operator),
 
+            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+            (r'\d+', Number.Integer),
+
             (r'(?<![\w\)\]])\'', String, 'string'),
             ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
             (r'.', Text),
@@ -788,6 +793,10 @@ class OctaveLexer(RegexLexer):
 
             (r'"[^"]*"', String),
 
+            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+            (r'\d+', Number.Integer),
+
             # quote can be transpose, instead of string:
             # (not great, but handles common cases...)
             (r'(?<=[\w\)\]])\'', Operator),
@@ -859,6 +868,10 @@ class ScilabLexer(RegexLexer):
             (r'(?<=[\w\)\]])\'', Operator),
             (r'(?<![\w\)\]])\'', String, 'string'),
 
+            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+            (r'\d+', Number.Integer),
+
             ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
             (r'.', Text),
         ],
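
The same three number rules are added to the Matlab, Octave, and Scilab lexers: floats with an optional exponent, integers with an exponent, then plain integers. A rough check of what each pattern matches, using the re module directly rather than Pygments itself:

    import re

    FLOAT = re.compile(r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?$')
    EXP_FLOAT = re.compile(r'\d+[eEf][+-]?[0-9]+$')
    INTEGER = re.compile(r'\d+$')

    assert FLOAT.match('3.14') and FLOAT.match('.5e-3')
    assert EXP_FLOAT.match('1e10')
    assert INTEGER.match('42')
    assert not FLOAT.match('42')  # plain integers fall through to Number.Integer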
@@ -25,7 +25,7 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
            'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
            'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
            'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
-           'PyPyLogLexer', 'RegeditLexer']
+           'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer']
 
 
 class IniLexer(RegexLexer):
@@ -1749,8 +1749,8 @@ class PyPyLogLexer(RegexLexer):
         ],
         "jit-log": [
             (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
-
             (r"^\+\d+: ", Comment),
+            (r"--end of the loop--", Comment),
             (r"[ifp]\d+", Name),
             (r"ptr\d+", Name),
             (r"(\()(\w+(?:\.\w+)?)(\))",
@@ -1760,7 +1760,7 @@ class PyPyLogLexer(RegexLexer):
             (r"-?\d+", Number.Integer),
             (r"'.*'", String),
             (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
-            (r"<.*?>", Name.Builtin),
+            (r"<.*?>+", Name.Builtin),
             (r"(label|debug_merge_point|jump|finish)", Name.Class),
             (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
              r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
@@ -1800,3 +1800,44 @@ class PyPyLogLexer(RegexLexer):
             (r"#.*?$", Comment),
         ],
     }
+
+
+class HxmlLexer(RegexLexer):
+    """
+    Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
+
+    *New in Pygments 1.6.*
+    """
+    name = 'Hxml'
+    aliases = ['haxeml', 'hxml']
+    filenames = ['*.hxml']
+
+    tokens = {
+        'root': [
+            # Seperator
+            (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
+            # Compiler switches with one dash
+            (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
+            # Compilerswitches with two dashes
+            (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
+             r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
+            # Targets and other options that take an argument
+            (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
+             r'cp|cmd)( +)(.+)',
+             bygroups(Punctuation, Keyword, Whitespace, String)),
+            # Options that take only numerical arguments
+            (r'(-)(swf-version)( +)(\d+)',
+             bygroups(Punctuation, Keyword, Number.Integer)),
+            # An Option that defines the size, the fps and the background
+            # color of an flash movie
+            (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
+             bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
+                      Punctuation, Number.Integer, Punctuation, Number.Integer,
+                      Punctuation, Number.Hex)),
+            # options with two dashes that takes arguments
+            (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
+             r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
+            # Single line comment, multiline ones are not allowed.
+            (r'#.*', Comment.Single)
+        ]
+    }
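
With the regenerated lexer mapping, the new Hxml lexer is reachable through the normal Pygments entry points. A quick usage sketch (the sample build-file content is invented for illustration):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    hxml_source = u'# hypothetical haXe build file\n-main Game\n-js game.js\n--next\n-swf9 game.swf\n'

    print(highlight(hxml_source, get_lexer_by_name('hxml'), TerminalFormatter()))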
@@ -58,6 +58,8 @@ def check_lexer(lx, absfn, outfn):
     text = text.strip(b('\n')) + b('\n')
     try:
         text = text.decode('utf-8')
+        if text.startswith(u'\ufeff'):
+            text = text[len(u'\ufeff'):]
     except UnicodeError:
         text = text.decode('latin1')
     ntext = []
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: pygments.rb
 version: !ruby/object:Gem::Version
-  version: 0.3.6
+  version: 0.3.7
 prerelease:
 platform: ruby
 authors: