rouge 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/Gemfile CHANGED
@@ -1 +1,12 @@
+ source 'http://rubygems.org'
+
  gemspec
+
+ gem 'minitest'
+ gem 'sexp_processor', '~> 3.0'
+ gem 'wrong', '~> 0.6.2'
+
+ gem 'rake'
+
+ # for visual tests
+ gem 'sinatra'
data/lib/rouge.rb CHANGED
@@ -1,2 +1,25 @@
+ # stdlib
+ require 'pathname'
+
  module Rouge
+   class << self
+     def highlight(text, lexer_name, formatter)
+       lexer = Lexer.find(lexer_name)
+       raise "unknown lexer #{lexer_name}" unless lexer
+
+       formatter.render(lexer.lex(text))
+     end
+   end
  end
+
+ load_dir = Pathname.new(__FILE__).dirname
+ load load_dir.join('rouge/token.rb')
+ load load_dir.join('rouge/lexer.rb')
+ load load_dir.join('rouge/lexers/shell.rb')
+ load load_dir.join('rouge/lexers/javascript.rb')
+
+ load load_dir.join('rouge/formatter.rb')
+ load load_dir.join('rouge/formatters/html.rb')
+
+ load load_dir.join('rouge/theme.rb')
+ load load_dir.join('rouge/themes/thankful_eyes.rb')
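The new Rouge.highlight entry point ties this release together: it looks up a registered lexer by name and hands the token stream to a formatter. A minimal usage sketch, assuming the 'shell' lexer and the HTML formatter added below (the sample input is invented):

    require 'rouge'

    # Highlight a shell snippet as HTML with the new top-level API.
    formatter = Rouge::Formatters::HTML.new
    puts Rouge.highlight("ls -la | grep foo", 'shell', formatter)
    # => <pre class="highlight">...</pre>   (roughly)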
data/lib/rouge/formatter.rb ADDED
@@ -0,0 +1,11 @@
+ module Rouge
+   class Formatter
+     def render(tokens)
+       enum_for(:stream, tokens).to_a.join
+     end
+
+     def stream(tokens, &b)
+       raise 'abstract'
+     end
+   end
+ end
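Formatter is the contract for output backends: subclasses implement #stream and yield string fragments, and the inherited #render joins them into one string. A hypothetical subclass as a sketch of that contract (TokenDumpFormatter is not part of the gem):

    # Dumps each token/value pair as plain text, one pair per line.
    class TokenDumpFormatter < Rouge::Formatter
      def stream(tokens, &b)
        tokens.each do |tok, val|
          yield "#{tok.name}(#{val.inspect})\n"
        end
      end
    end

    puts Rouge.highlight("echo hi", 'shell', TokenDumpFormatter.new)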
data/lib/rouge/formatters/html.rb ADDED
@@ -0,0 +1,33 @@
+ module Rouge
+   module Formatters
+     class HTML < Formatter
+       def initialize(opts={})
+         @css_class = opts[:css_class] || 'highlight'
+       end
+
+       def stream(tokens, &b)
+         yield "<pre class=#{@css_class.inspect}>"
+         tokens.each do |tok, val|
+           # TODO: properly html-encode val
+           val.gsub! '&', '&amp;'
+           val.gsub! '<', '&lt;'
+           val.gsub! '>', '&gt;'
+
+           case tok.shortname
+           when ''
+             yield val
+           when nil
+             raise "unknown token: #{tok.inspect}"
+           else
+             yield '<span class='
+             yield tok.shortname.inspect
+             yield '>'
+             yield val
+             yield '</span>'
+           end
+         end
+         yield '</pre>'
+       end
+     end
+   end
+ end
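The HTML formatter wraps output in a <pre> whose class defaults to "highlight" but can be overridden with :css_class; each token becomes a <span> carrying the token's shortname. A small sketch of that option (input invented, output approximate):

    formatter = Rouge::Formatters::HTML.new(:css_class => 'codehilite')
    tokens    = Rouge::Lexer.find('js').lex('var answer = 42;')
    puts formatter.render(tokens)
    # => <pre class="codehilite"><span class="kd">var</span> ... </pre>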
data/lib/rouge/lexer.rb ADDED
@@ -0,0 +1,238 @@
+ module Rouge
+   class Lexer
+     class << self
+       def create(opts={}, &b)
+         new(opts, &b).send(:force_load!)
+       end
+
+       def find(name)
+         registry[name.to_s]
+       end
+
+       def register(name, lexer)
+         registry[name.to_s] = lexer
+       end
+
+       private
+       def registry
+         @registry ||= {}
+       end
+     end
+
+     def name(n=nil)
+       return @name if n.nil?
+
+       @name = n.to_s
+       aliases @name
+     end
+
+     def aliases(*args)
+       args.each { |arg| Lexer.register(arg, self) }
+     end
+
+     def initialize(opts={}, &b)
+       options opts
+       @lazy_load_proc = b
+     end
+
+     def default_options
+       {}
+     end
+
+     def options(o={})
+       (@options ||= default_options).merge!(o)
+
+       @options
+     end
+
+     def option(k, v=:absent)
+       if v == :absent
+         options[k.to_s]
+       else
+         options({ k.to_s => v })
+       end
+     end
+
+     def debug(&b)
+       puts(b.call) if option :debug
+     end
+
+     def get_tokens(stream)
+       lex(stream).to_a
+     end
+
+     def lex(stream, &b)
+       return enum_for(:lex, stream) unless block_given?
+
+       stream_tokens(stream, &b)
+     end
+
+     def stream_tokens(stream, &b)
+       raise 'abstract'
+     end
+
+     protected
+
+     def force_load!
+       return self if @force_load
+       @force_load = true
+       instance_eval &@lazy_load_proc
+
+       self
+     end
+   end
+
+   class RegexLexer < Lexer
+     class Rule
+       attr_reader :callback
+       attr_reader :next_lexer
+       attr_reader :re
+       def initialize(re, callback, next_lexer)
+         @orig_re = re
+         @re = Regexp.new %/\\A(?:#{re.source})/
+         @callback = callback
+         @next_lexer = next_lexer
+       end
+
+       def inspect
+         "#<Rule #{@orig_re.inspect}>"
+       end
+
+       def consume(stream, &b)
+         # TODO: I'm sure there is a much faster way of doing this.
+         # also, encapsulate the stream in its own class.
+         match = stream.match(@re)
+
+         if match
+           stream.slice!(0...$&.size)
+           yield match
+           return true
+         end
+
+         false
+       end
+     end
+
+     def initialize(parent=nil, opts={}, &defn)
+       if parent.is_a? Hash
+         opts = parent
+         parent = nil
+       end
+
+       @parent = parent
+       super(opts, &defn)
+     end
+
+     def lexer(name, opts={}, &defn)
+       @scope ||= {}
+       name = name.to_s
+
+       if block_given?
+         l = @scope[name] = RegexLexer.new(self, options.merge(opts), &defn)
+         l.instance_variable_set :@name, name
+         l
+       else
+         @scope[name] || @parent && @parent.lexer(name)
+       end
+     end
+
+     def mixin(lexer)
+       lexer = get_lexer(lexer)
+       lexer.force_load!
+
+       rules << lexer
+     end
+
+     def rules
+       force_load!
+       @rules ||= []
+     end
+
+     def rule(re, token=nil, next_lexer=nil, &callback)
+       if block_given?
+         next_lexer = token
+       else
+         if token.is_a? String
+           token = Token[token]
+         end
+
+         callback = proc { |match, &b| b.call token, match }
+       end
+
+       rules << Rule.new(re, callback, get_lexer(next_lexer))
+     end
+
+     def stream_tokens(stream, &b)
+       stream = stream.dup
+       stack = [self]
+
+       stream_with_stack(stream.dup, [self], &b)
+     end
+
+     def stream_with_stack(stream, stack, &b)
+       return true if stream.empty?
+
+       until stream.empty?
+         debug { "stack: #{stack.map(&:name).inspect}" }
+         debug { "parsing #{stream.slice(0..20).inspect}" }
+         success = stack.last.step(stream, stack, &b)
+
+         if !success
+           debug { " no match, yielding Error" }
+           b.call(Token['Error'], stream.slice!(0..0))
+         end
+       end
+     end
+
+     def step(stream, stack, &b)
+       rules.each do |rule|
+         return true if run_rule(rule, stream, stack, &b)
+       end
+
+       false
+     end
+
+     private
+     def get_lexer(o)
+       case o
+       when RegexLexer, :pop!
+         o
+       else
+         lexer o
+       end
+     end
+
+     def run_rule(rule, stream, stack, &b)
+       case rule
+       when String, RegexLexer
+         lexer = get_lexer(rule)
+         debug { " entering mixin #{lexer.name}" }
+         get_lexer(rule).step(stream, stack, &b)
+       when Rule
+         debug { " trying #{rule.inspect}" }
+         rule.consume(stream) do |match|
+           debug { " got #{match[0].inspect}" }
+
+           rule.callback.call(*match) do |tok, res|
+             if tok.is_a? String
+               tok = Token[tok]
+             end
+
+             debug { " yielding #{tok.name.inspect}, #{res.inspect}" }
+             b.call(tok, res)
+           end
+
+           if rule.next_lexer == :pop!
+             debug { " popping stack" }
+             stack.pop
+           elsif rule.next_lexer
+             lexer = get_lexer(rule.next_lexer)
+             debug { " entering #{lexer.name}" }
+             stack.push lexer
+           end
+         end
+       end
+     end
+
+   end
+ end
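RegexLexer is the DSL the bundled lexers are built on: RegexLexer.create takes a definition block, `lexer` declares a named sub-lexer, `rule` pairs a regex with a token name and optionally a state to push (or :pop!), and `mixin` splices another sub-lexer's rules in. A toy lexer sketched against that DSL (the language and constant name are invented for illustration):

    NumbersAndWords = Rouge::RegexLexer.create do
      name 'numbers_and_words'

      lexer :root do
        rule /\s+/, 'Text'
        rule /\d+/, 'Literal.Number.Integer'
        rule /\w+/, 'Name.Other'
      end

      # Same pattern as the bundled lexers: mix :root into the top level.
      mixin :root
    end

    NumbersAndWords.get_tokens("forty 2")
    # => [[#<Token "Name.Other" "nx">, "forty"], [#<Token "Text" "">, " "], ...]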
data/lib/rouge/lexers/javascript.rb ADDED
@@ -0,0 +1,83 @@
+ module Rouge
+   module Lexers
+     JavascriptLexer = RegexLexer.create do
+       option :debug, true
+
+       name 'javascript'
+       aliases 'js'
+
+       lexer :comments_and_whitespace do
+         rule /\s+/, 'Text'
+         rule /<!--/, 'Comment' # really...?
+         rule %r(//.*?\n), 'Comment.Single'
+         rule %r(/\*.*?\*/), 'Comment.Multiline'
+       end
+
+       lexer :slash_starts_regex do
+         mixin :comments_and_whitespace
+         rule %r(
+           /(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/ # a nonempty regex
+           (?:[gim]+\b|\B) # regex flags
+         )x, 'Literal.String.Regex'
+
+         # if it's not matched by the above r.e., it's not
+         # a valid expression, so we use :bad_regex to eat until the
+         # end of the line.
+         rule %r(/), 'Literal.String.Regex', :bad_regex
+         rule //, 'Text', :pop!
+
+         lexer :bad_regex do
+           rule /[^\n]+/, 'Error', :pop!
+         end
+       end
+
+       keywords = %w(
+         for in while do break return continue switch case default if else
+         throw try catch finally new delete typeof instanceof void this
+       ).join('|')
+
+       declarations = %w(var let with function).join('|')
+
+       reserved = %w(
+         abstract boolean byte char class const debugger double enum export
+         extends final float goto implements import int interface long
+         native package private protected public short static super
+         synchronized throws transient volatile
+       ).join('|')
+
+       constants = %w(true false null NaN Infinity undefined).join('|')
+
+       builtins = %w(
+         Array Boolean Date Error Function Math netscape
+         Number Object Packages RegExp String sun decodeURI
+         decodeURIComponent encodeURI encodeURIComponent
+         Error eval isFinite isNaN parseFloat parseInt document this
+         window
+       ).join('|')
+
+       lexer :root do
+         rule %r(^(?=\s|/|<!--)), 'Text', :slash_starts_regex
+         mixin :comments_and_whitespace
+         rule %r(\+\+|--|~|&&|\?|\|\||\\(?=\n)|<<|>>>?|===|!==),
+           'Operator', :slash_starts_regex
+         rule %r([-<>+*%&|\^/!=]=?), 'Operator', :slash_starts_regex
+         rule /[{(\[;,]/, 'Punctuation', :slash_starts_regex
+         rule /[})\].]/, 'Punctuation'
+         rule /(?:#{keywords})\b/, 'Keyword', :slash_starts_regex
+         rule /(?:#{declarations})\b/, 'Keyword.Declaration', :slash_starts_regex
+         rule /(?:#{reserved})\b/, 'Keyword.Reserved'
+         rule /(?:#{constants})\b/, 'Keyword.Constant'
+         rule /(?:#{builtins})\b/, 'Name.Builtin'
+         rule /[$a-zA-Z_][a-zA-Z0-9_]*/, 'Name.Other'
+
+         rule /[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?/, 'Number.Float'
+         rule /0x[0-9a-fA-F]+/, 'Number.Hex'
+         rule /[0-9]+/, 'Number.Integer'
+         rule /"(\\\\|\\"|[^"])*"/, 'Literal.String.Double'
+         rule /'(\\\\|\\'|[^'])*'/, 'Literal.String.Single'
+       end
+
+       mixin :root
+     end
+   end
+ end
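The :slash_starts_regex state is the interesting part of this lexer: rules that can legally be followed by a regex literal (operators, opening punctuation, keywords) push it, so a '/' after '=' lexes as Literal.String.Regex rather than division. Note the definition also sets `option :debug, true`, so lexing prints rule-matching traces to stdout in this release. A brief sketch (input invented):

    js = Rouge::Lexer.find('js')   # also registered as 'javascript'
    js.get_tokens("var re = /ab+c/gi; // comment\n").each do |token, value|
      puts "#{token.name}\t#{value.inspect}"
    end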
data/lib/rouge/lexers/shell.rb ADDED
@@ -0,0 +1,109 @@
+ module Rouge
+   module Lexers
+     ShellLexer = RegexLexer.create do
+       name 'shell'
+       aliases 'bash', 'zsh', 'ksh', 'sh'
+
+       KEYWORDS = %w(
+         if fi else while do done for then return function case
+         select continue until esac elif
+       ).join('|')
+
+       BUILTINS = %w(
+         alias bg bind break builtin caller cd command compgen
+         complete declare dirs disown echo enable eval exec exit
+         export false fc fg getopts hash help history jobs kill let
+         local logout popd printf pushd pwd read readonly set shift
+         shopt source suspend test time times trap true type typeset
+         ulimit umask unalias unset wait
+       ).join('|')
+
+       lexer :basic do
+         rule /#.*\n/, 'Comment'
+
+         rule /\b(#{KEYWORDS})\s*\b/, 'Keyword'
+
+         rule /\b(#{BUILTINS})\s*\b(?!\.)/, 'Name.Builtin'
+
+         rule /(\b\w+)(=)/ do |_, var, eq, &out|
+           out.call 'Name.Variable', var
+           out.call 'Operator', eq
+         end
+
+         rule /[\[\]{}()=]/, 'Operator'
+         rule /&&|\|\|/, 'Operator'
+         # rule /\|\|/, 'Operator'
+
+         rule /<<</, 'Operator' # here-string
+         rule /<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2/, 'Literal.String'
+       end
+
+       lexer :double_quotes do
+         rule /"/, 'Literal.String.Double', :pop!
+         rule /\\./, 'Literal.String.Escape'
+         mixin :interp
+         rule /[^"`\\$]+/, 'Literal.String.Double'
+       end
+
+       lexer :data do
+         # TODO: this should be its own sublexer so we can capture
+         # interpolation and such
+         rule /$?"/, 'Literal.String.Double', :double_quotes
+
+         # single quotes are much easier than double quotes - we can
+         # literally just scan until the next single quote.
+         # POSIX: Enclosing characters in single-quotes ( '' )
+         # shall preserve the literal value of each character within the
+         # single-quotes. A single-quote cannot occur within single-quotes.
+         rule /$?'[^']*'/, 'Literal.String.Single'
+
+         rule /;/, 'Text'
+         rule /\s+/, 'Text'
+         rule /[^=\s\[\]{}()$"\'`\\<]+/, 'Text'
+         rule /\d+(?= |\Z)/, 'Number'
+         rule /</, 'Text'
+         mixin :interp
+       end
+
+       lexer :curly do
+         rule /}/, 'Keyword', :pop!
+         rule /:-/, 'Keyword'
+         rule /[a-zA-Z0-9_]+/, 'Name.Variable'
+         rule /[^}:"'`$]+/, 'Punctuation'
+         mixin :root
+       end
+
+       lexer :paren do
+         rule /\)/, 'Keyword', :pop!
+         mixin :root
+       end
+
+       lexer :math do
+         rule /\)\)/, 'Keyword', :pop!
+         rule %r([-+*/%^|&]|\*\*|\|\|), 'Operator'
+         rule /\d+/, 'Number'
+         mixin :root
+       end
+
+       lexer :backticks do
+         rule /`/, 'Literal.String.Backtick', :pop!
+         mixin :root
+       end
+
+       lexer :interp do
+         rule /\$\(\(/, 'Keyword', :math
+         rule /\$\(/, 'Keyword', :paren
+         rule /\${#?/, 'Keyword', :curly
+         rule /`/, 'Literal.String.Backtick', :backticks
+         rule /\$#?(\w+|.)/, 'Name.Variable'
+       end
+
+       lexer :root do
+         mixin :basic
+         mixin :data
+       end
+
+       mixin :root
+     end
+   end
+ end
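Most of the work here happens in the :interp sub-lexer, which :data and :double_quotes mix in so that $VAR, ${...}, $(...), $((...)) and backticks are picked out even inside double-quoted strings. A minimal sketch of inspecting the raw token/value pairs (the command line is invented):

    shell = Rouge::Lexer.find('bash')   # 'shell', 'sh', 'zsh' and 'ksh' also work
    shell.get_tokens(%q(echo "hello $USER" $(date))).each do |token, value|
      puts "#{token.name}\t#{value.inspect}"
    end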
data/lib/rouge/theme.rb ADDED
@@ -0,0 +1,112 @@
+ module Rouge
+   class Theme
+     class << self
+       def styles
+         @styles ||= {}
+       end
+
+       def main_style
+         @main_style ||= {}
+       end
+
+       def style(*tokens)
+         opts = {}
+         opts = tokens.pop if tokens.last.is_a? Hash
+
+         if tokens.empty?
+           @main_style = opts
+         end
+
+         tokens.each do |tok|
+           styles[tok.to_s] = opts
+         end
+       end
+
+       def name(n=nil)
+         return @name if n.nil?
+
+         @name = n.to_s
+         registry[@name] = self
+       end
+
+       def find(n)
+         registry[n.to_s]
+       end
+
+       def registry
+         @registry ||= {}
+       end
+     end
+   end
+
+   class CSSTheme < Theme
+     def initialize(opts={})
+       @opts = opts
+     end
+
+     def render
+       out = []
+       stream { |line| out << line }
+       out.join("\n")
+     end
+
+     def stream(&b)
+       return enum_for(:stream) unless block_given?
+
+       self.class.styles.each do |tokname, style|
+         stream_single(Token[tokname], style, &b)
+       end
+
+       render_stanza('.highlight', self.class.main_style, &b)
+     end
+
+     private
+     def stream_single(tok, style, &b)
+       render_stanza(css_selector(tok), style, &b)
+     end
+
+     def render_stanza(selector, style, &b)
+       return if style.empty?
+
+       yield "#{selector} {"
+       yield " color: #{style[:fg]};" if style[:fg]
+       yield " background-color: #{style[:bg]};" if style[:bg]
+       yield " font-weight: bold;" if style[:bold]
+       yield " font-style: italic;" if style[:italic]
+       yield " text-decoration: underline;" if style[:underline]
+
+       (style[:rules] || []).each do |rule|
+         yield " #{rule};"
+       end
+
+       yield "}"
+     end
+
+     def css_selector(token)
+       tokens = [token]
+       parent = token.parent
+
+       inflate_token(token).map do |tok|
+         base = ".highlight"
+         base << " .#{tok.shortname}" unless tok.shortname.empty?
+
+         base
+       end.join(', ')
+     end
+
+     # yield all of the tokens that should be styled the same
+     # as the given token. Essentially this recursively all of
+     # the subtokens, except those which are more specifically
+     # styled.
+     def inflate_token(tok, &b)
+       return enum_for(:inflate_token, tok) unless block_given?
+
+       yield tok
+       tok.sub_tokens.each_value do |st|
+         next if self.class.styles.include? st.name
+
+         inflate_token(st, &b)
+       end
+     end
+   end
+ end
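Theme is a small class-level DSL: `style` with token names records a style hash per token, `style` with no names records the default stanza, and CSSTheme#render/#stream turn those records into CSS scoped under .highlight (matching the HTML formatter's default class). A hypothetical two-token theme as a sketch:

    class TinyTheme < Rouge::CSSTheme
      name 'tiny'

      style :fg => '#222222', :bg => '#ffffff'             # default .highlight stanza
      style 'Comment', :fg => '#888888', :italic => true
      style 'Keyword', :bold => true
    end

    puts TinyTheme.new.render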
data/lib/rouge/themes/thankful_eyes.rb ADDED
@@ -0,0 +1,65 @@
+ module Rouge
+   module Themes
+     class ThankfulEyes < CSSTheme
+       # pallette, from GTKSourceView's ThankfulEyes
+       cool_as_ice = '#6c8b9f'
+       slate_blue = '#4e5d62'
+       eggshell_cloud = '#dee5e7'
+       krasna = '#122b3b'
+       aluminum1 = '#fefeec'
+       scarletred2 = '#cc0000'
+       butter3 = '#c4a000'
+       go_get_it = '#b2fd6d'
+       chilly = '#a8e1fe'
+       unicorn = '#faf6e4'
+       sandy = '#f6dd62'
+       pink_merengue = '#f696db'
+       dune = '#fff0a6'
+       backlit = '#4df4ff'
+       schrill = '#ffb000'
+
+       style :fg => unicorn, :bg => krasna
+
+       style 'Comment', :fg => cool_as_ice
+       style 'Error',
+             'Generic.Error', :fg => aluminum1, :bg => scarletred2
+       style 'Keyword', :fg => sandy, :bold => true
+       style 'Operator', :fg => backlit, :bold => true
+       style 'Comment.Preproc',
+             'Comment.Multiline',
+             'Comment.Single',
+             'Comment.Special', :fg => cool_as_ice, :italic => true
+       style 'Generic.Deleted', :fg => scarletred2
+       style 'Generic.Emph', :italic => true
+       style 'Generic.Subheading', :fg => '#800080', :bold => true
+       style 'Generic.Traceback', :fg => '#0040D0'
+       style 'Keyword.Constant', :fg => pink_merengue, :bold => true
+       style 'Keyword.Namespace',
+             'Keyword.Pseudo',
+             'Keyword.Reserved', :fg => schrill, :bold => true
+       style 'Keyword.Type',
+             'Name.Constant',
+             'Name.Class',
+             'Name.Decorator',
+             'Name.Namespace',
+             'Name.Builtin.Pseudo',
+             'Name.Exception', :fg => go_get_it, :bold => true
+       style 'Literal.Number', :fg => pink_merengue, :bold => true
+       style 'Literal.String', :fg => dune, :bold => true
+       style 'Literal.String.Escape',
+             'Literal.String.Char',
+             'Literal.String.Interpol',
+             'Literal.String.Other',
+             'Literal.String.Symbol', :fg => backlit, :bold => true
+       style 'Name.Attribute', :fg => '#7D9029'
+       style 'Name.Builtin', :fg => sandy
+       style 'Name.Entity', :fg => '#999999', :bold => true
+       style 'Name.Label', :fg => '#A0A000'
+       style 'Name.Tag', :fg => '#008000', :bold => true
+       style 'Text.Whitespace', :fg => '#BBBBBB'
+       style 'Name.Variable',
+             'Name.Function', :fg => chilly
+
+     end
+   end
+ end
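ThankfulEyes itself only declares styles; the stylesheet comes from the inherited CSSTheme#render. A sketch of writing it out so it can sit next to the HTML formatter's output (the file name is arbitrary):

    css = Rouge::Themes::ThankfulEyes.new.render
    File.open('thankful_eyes.css', 'w') { |f| f.write(css) }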
data/lib/rouge/token.rb ADDED
@@ -0,0 +1,168 @@
+ module Rouge
+   class Token
+     attr_reader :name
+     attr_reader :parent
+     attr_accessor :shortname
+
+     def make_single(name)
+       name = name.to_s
+       new_name = [self.name, name].compact.join('.')
+
+       new_token = self.clone
+       parent = self
+       new_token.instance_eval do
+         @name = new_name
+         @parent = parent
+         @sub_tokens = {}
+       end
+
+       sub_tokens[name] = new_token
+
+       new_token
+     end
+
+     def make(name, shortname=nil)
+       names = name.split('.')
+       names.inject(self) do |tok, name|
+         tok.make_single(name)
+       end
+     end
+
+     def [](name)
+       name = name.to_s
+
+       name.split('.').inject(self) do |tok, name|
+         tok.sub_tokens[name] || tok.make_single(name)
+       end
+     end
+
+     def sub_tokens
+       @sub_tokens ||= {}
+     end
+
+     def ===(other)
+       immediate = if self.class == other.class
+         self == other
+       else
+         self.name == other
+       end
+
+       immediate || !!(other.parent && self === other.parent)
+     end
+
+     def inspect
+       parts = [name.inspect]
+       parts << shortname.inspect if shortname
+       "#<Token #{parts.join(' ')}>"
+     end
+
+     class << self
+       def base
+         @base ||= new
+       end
+
+       def get(name)
+         base[name]
+       end
+
+       def token(name, shortname)
+         tok = get(name)
+         tok.shortname = shortname
+         tok
+       end
+
+       alias [] get
+
+       def each_token(&b)
+         recurse = proc do |token|
+           b.call(token)
+           token.sub_tokens.each_value(&recurse)
+         end
+
+         base.sub_tokens.each_value(&recurse)
+       end
+     end
+
+     # XXX IMPORTANT XXX
+     # For compatibility, this list must be kept in sync with
+     # pygments.token.STANDARD_TYPES
+     token 'Text', ''
+     token 'Text.Whitespace', 'w'
+     token 'Error', 'err'
+     token 'Other', 'x'
+
+     token 'Keyword', 'k'
+     token 'Keyword.Constant', 'kc'
+     token 'Keyword.Declaration', 'kd'
+     token 'Keyword.Namespace', 'kn'
+     token 'Keyword.Pseudo', 'kp'
+     token 'Keyword.Reserved', 'kr'
+     token 'Keyword.Type', 'kt'
+
+     token 'Name', 'n'
+     token 'Name.Attribute', 'na'
+     token 'Name.Builtin', 'nb'
+     token 'Name.Builtin.Pseudo', 'bp'
+     token 'Name.Class', 'nc'
+     token 'Name.Constant', 'no'
+     token 'Name.Decorator', 'nd'
+     token 'Name.Entity', 'ni'
+     token 'Name.Exception', 'ne'
+     token 'Name.Function', 'nf'
+     token 'Name.Property', 'py'
+     token 'Name.Label', 'nl'
+     token 'Name.Namespace', 'nn'
+     token 'Name.Other', 'nx'
+     token 'Name.Tag', 'nt'
+     token 'Name.Variable', 'nv'
+     token 'Name.Variable.Class', 'vc'
+     token 'Name.Variable.Global', 'vg'
+     token 'Name.Variable.Instance', 'vi'
+
+     token 'Literal', 'l'
+     token 'Literal.Date', 'ld'
+
+     token 'Literal.String', 's'
+     token 'Literal.String.Backtick', 'sb'
+     token 'Literal.String.Char', 'sc'
+     token 'Literal.String.Doc', 'sd'
+     token 'Literal.String.Double', 's2'
+     token 'Literal.String.Escape', 'se'
+     token 'Literal.String.Heredoc', 'sh'
+     token 'Literal.String.Interpol', 'si'
+     token 'Literal.String.Other', 'sx'
+     token 'Literal.String.Regex', 'sr'
+     token 'Literal.String.Single', 's1'
+     token 'Literal.String.Symbol', 'ss'
+
+     token 'Literal.Number', 'm'
+     token 'Literal.Number.Float', 'mf'
+     token 'Literal.Number.Hex', 'mh'
+     token 'Literal.Number.Integer', 'mi'
+     token 'Literal.Number.Integer.Long', 'il'
+     token 'Literal.Number.Oct', 'mo'
+
+     token 'Operator', 'o'
+     token 'Operator.Word', 'ow'
+
+     token 'Punctuation', 'p'
+
+     token 'Comment', 'c'
+     token 'Comment.Multiline', 'cm'
+     token 'Comment.Preproc', 'cp'
+     token 'Comment.Single', 'c1'
+     token 'Comment.Special', 'cs'
+
+     token 'Generic', 'g'
+     token 'Generic.Deleted', 'gd'
+     token 'Generic.Emph', 'ge'
+     token 'Generic.Error', 'gr'
+     token 'Generic.Heading', 'gh'
+     token 'Generic.Inserted', 'gi'
+     token 'Generic.Output', 'go'
+     token 'Generic.Prompt', 'gp'
+     token 'Generic.Strong', 'gs'
+     token 'Generic.Subheading', 'gu'
+     token 'Generic.Traceback', 'gt'
+   end
+ end
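Tokens form a dot-separated hierarchy: Token['Keyword.Constant'] walks (and lazily creates) the tree, the shortnames are the CSS classes the HTML formatter emits, and #=== is true when the left token is the same as, or an ancestor of, the right one. A brief sketch:

    tok = Rouge::Token['Keyword.Constant']
    tok.name                           # => "Keyword.Constant"
    tok.shortname                      # => "kc"
    Rouge::Token['Keyword'] === tok    # => true: an ancestor matches its descendants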
data/lib/rouge/version.rb CHANGED
@@ -1,5 +1,5 @@
  module Rouge
    def self.version
-     "0.0.1"
+     "0.0.2"
    end
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: rouge
  version: !ruby/object:Gem::Version
-   version: 0.0.1
+   version: 0.0.2
  prerelease:
  platform: ruby
  authors:
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-08-30 00:00:00.000000000 Z
+ date: 2012-09-01 00:00:00.000000000 Z
  dependencies: []
  description: see the description for now
  email:
@@ -19,7 +19,15 @@ extensions: []
  extra_rdoc_files: []
  files:
  - Gemfile
+ - lib/rouge/lexers/shell.rb
+ - lib/rouge/lexers/javascript.rb
+ - lib/rouge/themes/thankful_eyes.rb
+ - lib/rouge/token.rb
+ - lib/rouge/formatters/html.rb
  - lib/rouge/version.rb
+ - lib/rouge/formatter.rb
+ - lib/rouge/lexer.rb
+ - lib/rouge/theme.rb
  - lib/rouge.rb
  homepage: http://github.com/jayferd/rouge
  licenses: []