pygments.rb 0.2.12 → 0.2.13

@@ -1,3 +1,3 @@
 module Pygments
-  VERSION = '0.2.12'
+  VERSION = '0.2.13'
 end
@@ -28,25 +28,36 @@ class Dasm16Lexer(RegexLexer):
     mimetypes = ['text/x-dasm16']
 
     INSTRUCTIONS = [
-        'SET', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD', 'SHL',
-        'SHR', 'AND', 'BOR', 'XOR', 'IFE', 'IFN', 'IFG', 'IFB',
-        'JSR'
+        'SET',
+        'ADD', 'SUB',
+        'MUL', 'MLI',
+        'DIV', 'DVI',
+        'MOD', 'MDI',
+        'AND', 'BOR', 'XOR',
+        'SHR', 'ASR', 'SHL',
+        'IFB', 'IFC', 'IFE', 'IFN', 'IFG', 'IFA', 'IFL', 'IFU',
+        'ADX', 'SBX',
+        'STI', 'STD',
+        'JSR',
+        'INT', 'IAG', 'IAS', 'RFI', 'IAQ', 'HWN', 'HWQ', 'HWI',
     ]
 
     REGISTERS = [
         'A', 'B', 'C',
         'X', 'Y', 'Z',
         'I', 'J',
-        'SP', 'PC',
+        'SP', 'PC', 'EX',
         'POP', 'PEEK', 'PUSH'
     ]
 
     # Regexes yo
-    string = r'"(\\"|[^"])*"'
     char = r'[a-zA-Z$._0-9@]'
     identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)'
-    number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
+    number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'
+    binary_number = r'0b[01_]+'
     instruction = r'(?i)(' + '|'.join(INSTRUCTIONS) + ')'
+    single_char = r"'\\?" + char + "'"
+    string = r'"(\\"|[^"])*"'
 
     def guess_identifier(lexer, match):
         ident = match.group(0)
@@ -57,14 +68,21 @@ class Dasm16Lexer(RegexLexer):
         'root': [
             include('whitespace'),
             (':' + identifier, Name.Label),
+            (identifier + ':', Name.Label),
             (instruction, Name.Function, 'instruction-args'),
-            (r'(DAT|dat)', Name.Function, 'data-args'),
+            (r'\.' + identifier, Name.Function, 'data-args'),
             (r'[\r\n]+', Text)
         ],
 
+        'numeric' : [
+            (binary_number, Number.Integer),
+            (number, Number.Integer),
+            (single_char, String),
+        ],
+
         'arg' : [
             (identifier, guess_identifier),
-            (number, Number.Integer),
+            include('numeric')
         ],
 
         'deref' : [
@@ -89,7 +107,7 @@ class Dasm16Lexer(RegexLexer):
 
         'data-args' : [
             (r',', Punctuation),
-            (number, Number.Integer),
+            include('numeric'),
             (string, String),
             include('instruction-line')
         ],
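
The DCPU-16 hunks above widen the instruction set toward the 1.7 spec, allow signed and binary literals, and switch data directives from a literal DAT to any dot-prefixed identifier. A quick standalone check of the regexes that change (illustration only; the abbreviated INSTRUCTIONS list below is not the lexer's full table):

import re

INSTRUCTIONS = ['SET', 'ADD', 'IFU', 'HWI']            # abbreviated for the example
instruction = r'(?i)(' + '|'.join(INSTRUCTIONS) + ')'  # case-insensitive alternation
number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'             # sign is now an optional prefix
binary_number = r'0b[01_]+'

print(bool(re.match(instruction, 'hwi')))       # True: lowercase mnemonics match
print(bool(re.match(number, '-0x10')))          # True: negative hex literal
print(bool(re.match(binary_number, '0b1010')))  # True: new binary form
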
@@ -1 +1 @@
-96e8b9cb83ef
+5cc94956e233
@@ -32,6 +32,9 @@ def escape_tex(text, commandprefix):
                 replace('#', r'\%sZsh{}' % commandprefix). \
                 replace('%', r'\%sZpc{}' % commandprefix). \
                 replace('$', r'\%sZdl{}' % commandprefix). \
+                replace('-', r'\%sZhy{}' % commandprefix). \
+                replace("'", r'\%sZsq{}' % commandprefix). \
+                replace('"', r'\%sZdq{}' % commandprefix). \
                 replace('~', r'\%sZti{}' % commandprefix)
 
 
@@ -115,6 +118,9 @@ STYLE_TEMPLATE = r'''
 \def\%(cp)sZsh{\char`\#}
 \def\%(cp)sZpc{\char`\%%}
 \def\%(cp)sZdl{\char`\$}
+\def\%(cp)sZhy{\char`\-}
+\def\%(cp)sZsq{\char`\'}
+\def\%(cp)sZdq{\char`\"}
 \def\%(cp)sZti{\char`\~}
 %% for compatibility with earlier versions
 \def\%(cp)sZat{@}
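
Together, these two hunks teach the LaTeX formatter to route hyphens and both quote characters through dedicated macros (Zhy, Zsq, Zdq) instead of emitting them raw. A rough illustration of the resulting escaping, assuming the formatter's usual 'PY' command prefix (this is just the three new substitutions applied by hand, not the formatter itself):

text = "don't use --force"
escaped = (text.replace('-', r'\PYZhy{}')
               .replace("'", r'\PYZsq{}')
               .replace('"', r'\PYZdq{}'))
print(escaped)  # don\PYZsq{}t use \PYZhy{}\PYZhy{}force
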
@@ -544,10 +544,10 @@ class RegexLexer(Lexer):
                 try:
                     if text[pos] == '\n':
                         # at EOL, reset state to "root"
-                        pos += 1
                         statestack = ['root']
                         statetokens = tokendefs['root']
                         yield pos, Text, u'\n'
+                        pos += 1
                         continue
                     yield pos, Error, text[pos]
                     pos += 1
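
Moving pos += 1 below the yield means the synthesized newline token is reported at the offset of the '\n' itself rather than one character past it. A minimal sketch of the observable difference (the one-rule lexer here is hypothetical; the real regression test is the one added to the test suite near the end of this diff):

from pygments.lexer import RegexLexer
from pygments.token import Text

class LetterLexer(RegexLexer):
    # matches only single letters, so '\n' falls through to the EOL handling
    tokens = {'root': [(r'[a-z]', Text.Letter)]}

print(list(LetterLexer().get_tokens_unprocessed('a\ne')))
# with the fix:   [(0, Text.Letter, 'a'), (1, Text, '\n'), (2, Text.Letter, 'e')]
# before the fix, the newline token carried offset 2 instead of 1
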
@@ -204,17 +204,18 @@ DATATYPES = [
     'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box',
     'bytea', 'char', 'character', 'character varying', 'cidr', 'circle',
     'date', 'decimal', 'double precision', 'float4', 'float8', 'inet',
-    'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'line', 'lseg',
-    'macaddr', 'money', 'numeric', 'path', 'point', 'polygon', 'real',
-    'serial', 'serial4', 'serial8', 'smallint', 'text', 'time', 'timestamp',
-    'timestamptz', 'timetz', 'tsquery', 'tsvector', 'txid_snapshot', 'uuid',
-    'varbit', 'varchar', 'with time zone', 'without time zone', 'xml',
+    'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'json', 'line',
+    'lseg', 'macaddr', 'money', 'numeric', 'path', 'point', 'polygon',
+    'real', 'serial', 'serial2', 'serial4', 'serial8', 'smallint',
+    'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz',
+    'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar',
+    'with time zone', 'without time zone', 'xml',
     ]
 
 PSEUDO_TYPES = [
-    'any', 'anyarray', 'anyelement', 'anyenum', 'anynonarray', 'cstring',
-    'internal', 'language_handler', 'fdw_handler', 'record', 'trigger',
-    'void', 'opaque',
+    'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange',
+    'cstring', 'internal', 'language_handler', 'fdw_handler', 'record',
+    'trigger', 'void', 'opaque',
     ]
 
 # Remove 'trigger' from types
@@ -31,7 +31,7 @@ class GasLexer(RegexLexer):
 
     #: optional Comment or Whitespace
     string = r'"(\\"|[^"])*"'
-    char = r'[a-zA-Z$._0-9@]'
+    char = r'[a-zA-Z$._0-9@-]'
     identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)'
     number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
 
@@ -304,7 +304,8 @@ class NasmLexer(RegexLexer):
     floatn = decn + r'\.e?' + decn
     string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
     declkw = r'(?:res|d)[bwdqt]|times'
-    register = (r'[a-d][lh]|e?[a-d]x|e?[sb]p|e?[sd]i|[c-gs]s|st[0-7]|'
+    register = (r'r[0-9][0-5]?[bwd]|'
+                r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
                 r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
     wordop = r'seg|wrt|strict'
     type = r'byte|[dq]?word'
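
The widened register alternation adds the x86-64 names: the rax/rbp/rsi family via the [er]? prefix, and the numbered registers r8–r15 with a b/w/d size suffix. A standalone sanity check of the new pattern (illustration only):

import re

register = (r'r[0-9][0-5]?[bwd]|'
            r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
            r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')

for name in ('rax', 'rbp', 'r10d', 'r8w', 'eax'):
    print(name, bool(re.match('(?:%s)$' % register, name)))   # all True
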
@@ -102,7 +102,7 @@ class CSharpLexer(RegexLexer):
              Comment.Preproc),
             (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
              Keyword)),
-            (r'(abstract|as|base|break|case|catch|'
+            (r'(abstract|as|async|await|base|break|case|catch|'
             r'checked|const|continue|default|delegate|'
             r'do|else|enum|event|explicit|extern|false|finally|'
             r'fixed|for|foreach|goto|if|implicit|in|interface|'
@@ -1688,13 +1688,46 @@ class ElixirLexer(RegexLexer):
     filenames = ['*.ex', '*.exs']
     mimetypes = ['text/x-elixir']
 
+    def gen_elixir_sigil_rules():
+        states = {}
+
+        states['strings'] = [
+            (r'(%[A-Ba-z])?"""(?:.|\n)*?"""', String.Doc),
+            (r"'''(?:.|\n)*?'''", String.Doc),
+            (r'"', String.Double, 'dqs'),
+            (r"'.*'", String.Single),
+            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
+             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
+        ]
+
+        for lbrace, rbrace, name, in ('\\{', '\\}', 'cb'), \
+                                     ('\\[', '\\]', 'sb'), \
+                                     ('\\(', '\\)', 'pa'), \
+                                     ('\\<', '\\>', 'lt'):
+
+            states['strings'] += [
+                (r'%[a-z]' + lbrace, String.Double, name + 'intp'),
+                (r'%[A-Z]' + lbrace, String.Double, name + 'no-intp')
+            ]
+
+            states[name +'intp'] = [
+                (r'' + rbrace + '[a-z]*', String.Double, "#pop"),
+                include('enddoublestr')
+            ]
+
+            states[name +'no-intp'] = [
+                (r'.*' + rbrace + '[a-z]*', String.Double , "#pop")
+            ]
+
+        return states
+
     tokens = {
         'root': [
             (r'\s+', Text),
             (r'#.*$', Comment.Single),
-            (r'\b(case|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
-             r'defp|def|defprotocol|defimpl|defrecord|defmacro|defdelegate|'
-             r'defexception|exit|raise|throw)\b(?![?!])|'
+            (r'\b(case|cond|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
+             r'defp?|defprotocol|defimpl|defrecord|defmacrop?|defdelegate|'
+             r'defexception|exit|raise|throw|unless|after|rescue|catch|else)\b(?![?!])|'
              r'(?<!\.)\b(do|\-\>)\b\s*', Keyword),
             (r'\b(import|require|use|recur|quote|unquote|super|refer)\b(?![?!])',
              Keyword.Namespace),
@@ -1708,26 +1741,18 @@ class ElixirLexer(RegexLexer):
              r'\*\*?|=?~|<\-)|([a-zA-Z_]\w*([?!])?)(:)(?!:)', String.Symbol),
             (r':"', String.Symbol, 'interpoling_symbol'),
             (r'\b(nil|true|false)\b(?![?!])|\b[A-Z]\w*\b', Name.Constant),
-            (r'\b(__(FILE|LINE|MODULE|LOCAL|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
+            (r'\b(__(FILE|LINE|MODULE|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
             (r'[a-zA-Z_!][\w_]*[!\?]?', Name),
             (r'[(){};,/\|:\\\[\]]', Punctuation),
             (r'@[a-zA-Z_]\w*|&\d', Name.Variable),
             (r'\b(0[xX][0-9A-Fa-f]+|\d(_?\d)*(\.(?![^\d\s])'
              r'(_?\d)*)?([eE][-+]?\d(_?\d)*)?|0[bB][01]+)\b', Number),
+            (r'%r\/.*\/', String.Regex),
             include('strings'),
         ],
-        'strings': [
-            (r'"""(?:.|\n)*?"""', String.Doc),
-            (r"'''(?:.|\n)*?'''", String.Doc),
-            (r'"', String.Double, 'dqs'),
-            (r"'.*'", String.Single),
-            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
-             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
-        ],
         'dqs': [
             (r'"', String.Double, "#pop"),
-            include('interpoling'),
-            (r'[^#"]+', String.Double),
+            include('enddoublestr')
         ],
         'interpoling': [
             (r'#{', String.Interpol, 'interpoling_string'),
@@ -1741,7 +1766,12 @@ class ElixirLexer(RegexLexer):
             include('interpoling'),
             (r'[^#"]+', String.Symbol),
         ],
+        'enddoublestr' : [
+            include('interpoling'),
+            (r'[^#"]+', String.Double),
+        ]
     }
+    tokens.update(gen_elixir_sigil_rules())
 
 
 class ElixirConsoleLexer(Lexer):
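
The new gen_elixir_sigil_rules() builds, for each of the four delimiter pairs, two 'strings' entries plus a pair of helper states (e.g. cbintp / cbno-intp for braces): lowercase sigils interpolate, uppercase ones do not. A rough illustration of the dispatch those generated openers perform (regexes copied from the hunk; the example sigils are just samples):

import re

opener_intp    = re.compile(r'%[a-z]\{')   # e.g. %w{one #{two}}   -> state 'cbintp'
opener_no_intp = re.compile(r'%[A-Z]\{')   # e.g. %W{one two}      -> state 'cbno-intp'

print(bool(opener_intp.match('%w{one #{two}}')))   # True
print(bool(opener_no_intp.match('%W{one two}')))   # True
print(bool(opener_intp.match('%W{one two}')))      # False
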
@@ -28,25 +28,36 @@ class Dasm16Lexer(RegexLexer):
     mimetypes = ['text/x-dasm16']
 
     INSTRUCTIONS = [
-        'SET', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD', 'SHL',
-        'SHR', 'AND', 'BOR', 'XOR', 'IFE', 'IFN', 'IFG', 'IFB',
-        'JSR'
+        'SET',
+        'ADD', 'SUB',
+        'MUL', 'MLI',
+        'DIV', 'DVI',
+        'MOD', 'MDI',
+        'AND', 'BOR', 'XOR',
+        'SHR', 'ASR', 'SHL',
+        'IFB', 'IFC', 'IFE', 'IFN', 'IFG', 'IFA', 'IFL', 'IFU',
+        'ADX', 'SBX',
+        'STI', 'STD',
+        'JSR',
+        'INT', 'IAG', 'IAS', 'RFI', 'IAQ', 'HWN', 'HWQ', 'HWI',
     ]
 
     REGISTERS = [
         'A', 'B', 'C',
         'X', 'Y', 'Z',
         'I', 'J',
-        'SP', 'PC',
+        'SP', 'PC', 'EX',
         'POP', 'PEEK', 'PUSH'
     ]
 
     # Regexes yo
-    string = r'"(\\"|[^"])*"'
     char = r'[a-zA-Z$._0-9@]'
     identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)'
-    number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
+    number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'
+    binary_number = r'0b[01_]+'
     instruction = r'(?i)(' + '|'.join(INSTRUCTIONS) + ')'
+    single_char = r"'\\?" + char + "'"
+    string = r'"(\\"|[^"])*"'
 
     def guess_identifier(lexer, match):
         ident = match.group(0)
@@ -57,14 +68,21 @@ class Dasm16Lexer(RegexLexer):
         'root': [
             include('whitespace'),
             (':' + identifier, Name.Label),
+            (identifier + ':', Name.Label),
             (instruction, Name.Function, 'instruction-args'),
-            (r'(DAT|dat)', Name.Function, 'data-args'),
+            (r'\.' + identifier, Name.Function, 'data-args'),
             (r'[\r\n]+', Text)
         ],
 
+        'numeric' : [
+            (binary_number, Number.Integer),
+            (number, Number.Integer),
+            (single_char, String),
+        ],
+
         'arg' : [
             (identifier, guess_identifier),
-            (number, Number.Integer),
+            include('numeric')
         ],
 
        'deref' : [
@@ -89,7 +107,7 @@ class Dasm16Lexer(RegexLexer):
 
         'data-args' : [
             (r',', Punctuation),
-            (number, Number.Integer),
+            include('numeric'),
             (string, String),
             include('instruction-line')
         ],
@@ -50,7 +50,7 @@ class JuliaLexer(RegexLexer):
             # keywords
             (r'(begin|while|for|in|return|break|continue|'
              r'macro|quote|let|if|elseif|else|try|catch|end|'
-             r'bitstype|ccall)\b', Keyword),
+             r'bitstype|ccall|do)\b', Keyword),
             (r'(local|global|const)\b', Keyword.Declaration),
             (r'(module|import|export)\b', Keyword.Reserved),
             (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
@@ -297,7 +297,6 @@ class SmalltalkLexer(RegexLexer):
             (r'\^|\:=|\_', Operator),
             # temporaries
             (r'[\]({}.;!]', Text),
-
         ],
         'method definition' : [
             # Not perfect can't allow whitespaces at the beginning and the
@@ -316,7 +315,7 @@ class SmalltalkLexer(RegexLexer):
             (r'', Text, '#pop'), # else pop
         ],
         'literals' : [
-            (r'\'[^\']*\'', String, 'afterobject'),
+            (r"'(''|[^'])*'", String, 'afterobject'),
             (r'\$.', String.Char, 'afterobject'),
             (r'#\(', String.Symbol, 'parenth'),
             (r'\)', Text, 'afterobject'),
@@ -327,14 +326,14 @@ class SmalltalkLexer(RegexLexer):
             (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
             (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
             # literals
-            (r'\'[^\']*\'', String),
+            (r"'(''|[^'])*'", String),
             (r'\$.', String.Char),
             (r'#*\(', String.Symbol, 'inner_parenth'),
         ],
         'parenth' : [
             # This state is a bit tricky since
             # we can't just pop this state
-            (r'\)', String.Symbol, ('root','afterobject')),
+            (r'\)', String.Symbol, ('root', 'afterobject')),
             include('_parenth_helper'),
         ],
         'inner_parenth': [
@@ -344,7 +343,7 @@ class SmalltalkLexer(RegexLexer):
         'whitespaces' : [
             # skip whitespace and comments
             (r'\s+', Text),
-            (r'"[^"]*"', Comment),
+            (r'"(""|[^"])*"', Comment),
         ],
         'objects' : [
             (r'\[', Text, 'blockvariables'),
@@ -353,7 +352,7 @@ class SmalltalkLexer(RegexLexer):
              Name.Builtin.Pseudo, 'afterobject'),
             (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
             (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
-            (r'#("[^"]*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
+            (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
              String.Symbol, 'afterobject'),
             include('literals'),
         ],
@@ -374,11 +373,11 @@ class SmalltalkLexer(RegexLexer):
         ],
         'squeak fileout' : [
             # Squeak fileout format (optional)
-            (r'^"[^"]*"!', Keyword),
-            (r"^'[^']*'!", Keyword),
+            (r'^"(""|[^"])*"!', Keyword),
+            (r"^'(''|[^'])*'!", Keyword),
             (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
              bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
-            (r'^(!)(\w+(?: class)?)( methodsFor: )(\'[^\']*\')(.*?!)',
+            (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
              bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
             (r'^(\w+)( subclass: )(#\w+)'
              r'(\s+instanceVariableNames: )(.*?)'
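
Most of the Smalltalk changes swap [^']* / [^"]* patterns for ones that understand Smalltalk's escaping convention: a quote inside a string (or comment) is written by doubling it. A small before/after comparison on such a literal (illustration only):

import re

old_string = r"'[^']*'"
new_string = r"'(''|[^'])*'"
src = "'it''s'"                              # one Smalltalk string containing: it's

print(re.match(old_string, src).group(0))    # 'it'     -- stops at the doubled quote
print(re.match(new_string, src).group(0))    # 'it''s'  -- consumes the whole literal
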
@@ -39,7 +39,6 @@
 """
 
 import re
-from copy import deepcopy
 
 from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
 from pygments.token import Punctuation, \
@@ -61,9 +60,6 @@ def language_callback(lexer, match):
     """Parse the content of a $-string using a lexer
 
     The lexer is chosen looking for a nearby LANGUAGE.
-
-    Note: this function should have been a `PostgresBase` method, but the
-    rules deepcopy fails in this case.
     """
     l = None
     m = language_re.match(lexer.text[match.end():match.end()+100])
@@ -93,8 +89,6 @@ class PostgresBase(object):
     had, _tokens could be created on this ancestor and not updated for the
     other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
     seem to suggest that regexp lexers are not really subclassable.
-
-    `language_callback` should really be our method, but this breaks deepcopy.
     """
     def get_tokens_unprocessed(self, text, *args):
         # Have a copy of the entire text to be used by `language_callback`.
@@ -182,7 +176,7 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
     mimetypes = ['text/x-plpgsql']
 
     flags = re.IGNORECASE
-    tokens = deepcopy(PostgresLexer.tokens)
+    tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())
 
     # extend the keywords list
     for i, pattern in enumerate(tokens['root']):
@@ -216,7 +210,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
     aliases = []    # not public
 
     flags = re.IGNORECASE
-    tokens = deepcopy(PostgresLexer.tokens)
+    tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())
 
     tokens['root'].append(
         (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
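
Replacing deepcopy with dict((k, l[:]) ...) keeps the part that matters: each state gets its own rule list, so the appends that PlPgsqlLexer and PsqlRegexLexer perform do not leak back into PostgresLexer.tokens, while the rule tuples themselves are shared (they are never mutated). A sketch of that copy semantics, using a hypothetical stand-in for the real token table (iteritems is the Python 2 spelling; items() on Python 3):

base = {'root': [(r'\bSELECT\b', 'Keyword')]}          # stand-in for PostgresLexer.tokens

copy = dict((k, l[:]) for (k, l) in base.items())      # shallow copy of each rule list
copy['root'].append((r'\\[^\s]+', 'Keyword.Pseudo'))   # subclass-style extension

print(len(base['root']), len(copy['root']))            # 1 2 -- the base lexer is untouched
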
@@ -1504,6 +1504,7 @@ class ColdfusionLexer(RegexLexer):
         ],
     }
 
+
 class ColdfusionMarkupLexer(RegexLexer):
     """
     Coldfusion markup only
@@ -528,7 +528,7 @@ class ApacheConfLexer(RegexLexer):
             (r'(#.*?)$', Comment),
             (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
              bygroups(Name.Tag, Text, String, Name.Tag)),
-            (r'([a-zA-Z][a-zA-Z0-9]*)(\s+)',
+            (r'([a-zA-Z][a-zA-Z0-9_]*)(\s+)',
              bygroups(Name.Builtin, Text), 'value'),
             (r'\.+', Text),
         ],
@@ -101,6 +101,16 @@ class JSONLexer(RegexLexer):
     filenames = ['*.json']
     mimetypes = [ 'application/json', ]
 
+    # integer part of a number
+    int_part = r'-?(0|[1-9]\d*)'
+
+    # fractional part of a number
+    frac_part = r'\.\d+'
+
+    # exponential part of a number
+    exp_part = r'[eE](\+|-)?\d+'
+
+
     flags = re.DOTALL
     tokens = {
         'whitespace': [
@@ -108,9 +118,12 @@ class JSONLexer(RegexLexer):
         ],
 
         # represents a simple terminal value
-        'simplevalue':[
+        'simplevalue': [
             (r'(true|false|null)\b', Keyword.Constant),
-            (r'-?[0-9]+', Number.Integer),
+            (('%(int_part)s(%(frac_part)s%(exp_part)s|'
+              '%(exp_part)s|%(frac_part)s)') % vars(),
+             Number.Float),
+            (int_part, Number.Integer),
             (r'"(\\\\|\\"|[^"])*"', String.Double),
         ],
@@ -26,7 +26,7 @@ class StyleMeta(type):
                 if len(col) == 6:
                     return col
                 elif len(col) == 3:
-                    return col[0]+'0'+col[1]+'0'+col[2]+'0'
+                    return col[0]*2 + col[1]*2 + col[2]*2
             elif text == '':
                 return ''
             assert False, "wrong color format %r" % text
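
The colorformat() change fixes expansion of three-digit hex colors: CSS-style shorthand doubles each digit, whereas the old code interleaved zeros. A worked example:

col = '0f8'                                         # i.e. '#0f8'
old = col[0] + '0' + col[1] + '0' + col[2] + '0'    # '00f080'  (wrong shade)
new = col[0]*2 + col[1]*2 + col[2]*2                # '00ff88'  (what #0f8 means)
print(old, new)
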
@@ -11,6 +11,7 @@ import unittest
 
 from pygments.token import Text
 from pygments.lexer import RegexLexer
+from pygments.lexer import bygroups
 
 
 class TestLexer(RegexLexer):
@@ -37,3 +38,10 @@ class TupleTransTest(unittest.TestCase):
         self.assertEqual(toks,
            [(0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
             (3, Text.Beer, 'd'), (4, Text.Root, 'e')])
+
+    def test_multiline(self):
+        lx = TestLexer()
+        toks = list(lx.get_tokens_unprocessed('a\ne'))
+        self.assertEqual(toks,
+           [(0, Text.Root, 'a'), (1, Text, u'\n'),
+            (2, Text.Root, 'e')])
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: pygments.rb
 version: !ruby/object:Gem::Version
-  hash: 15
+  hash: 13
   prerelease:
   segments:
   - 0
   - 2
-  - 12
-  version: 0.2.12
+  - 13
+  version: 0.2.13
 platform: ruby
 authors:
 - Aman Gupta
@@ -15,7 +15,8 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2012-04-18 00:00:00 Z
+date: 2012-06-19 00:00:00 +02:00
+default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rubypython
@@ -412,6 +413,7 @@ files:
 - vendor/pygments-main/tests/test_token.py
 - vendor/pygments-main/tests/test_using_api.py
 - vendor/pygments-main/tests/test_util.py
+has_rdoc: true
 homepage: http://github.com/tmm1/pygments.rb
 licenses: []
 
@@ -441,7 +443,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 requirements: []
 
 rubyforge_project:
-rubygems_version: 1.8.15
+rubygems_version: 1.6.2
 signing_key:
 specification_version: 3
 summary: pygments wrapper for ruby