pygments.rb 0.2.6 → 0.2.7

This diff compares the publicly released contents of two versions of the package, exactly as they appear in their public registry. It is provided for informational purposes only.
@@ -1,3 +1,3 @@
  module Pygments
-   VERSION = '0.2.6'
+   VERSION = '0.2.7'
  end
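
A quick way to confirm which release is active after upgrading is to read the constant changed above. A minimal sketch, assuming the gem is installed and, per the pygments.rb README, is loaded with require 'pygments':

    # Sketch: print the running pygments.rb version (assumes the gem is installed).
    require 'pygments'
    puts Pygments::VERSION # expected to print "0.2.7" for this release
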
@@ -1 +1 @@
- 363aafdb9e13
+ 65df4880283d
@@ -1,14 +1,14 @@
- POST /demo/submit/ HTTP/1.1
- Host: pygments.org
- Connection: keep-alivk
- Cache-Control: max-age=0
- Origin: http://pygments.org
- User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.63 Safari/535.7
- Content-Type: application/x-www-form-urlencoded
- Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
- Referer: http://pygments.org/
- Accept-Encoding: gzip,deflate,sdch
- Accept-Language: en-US,en;q=0.8
- Accept-Charset: windows-949,utf-8;q=0.7,*;q=0.3
-
+ POST /demo/submit/ HTTP/1.1
+ Host: pygments.org
+ Connection: keep-alivk
+ Cache-Control: max-age=0
+ Origin: http://pygments.org
+ User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.63 Safari/535.7
+ Content-Type: application/x-www-form-urlencoded
+ Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+ Referer: http://pygments.org/
+ Accept-Encoding: gzip,deflate,sdch
+ Accept-Language: en-US,en;q=0.8
+ Accept-Charset: windows-949,utf-8;q=0.7,*;q=0.3
+
  name=test&lang=text&code=asdf&user=
@@ -1,27 +1,27 @@
- HTTP/1.1 200 OK
- Date: Tue, 13 Dec 2011 00:11:44 GMT
- Status: 200 OK
- X-Transaction: 50b85fff78dab4a3
- X-RateLimit-Limit: 150
- ETag: "b31143be48ebfe7512b65fe64fe092f3"
- X-Frame-Options: SAMEORIGIN
- Last-Modified: Tue, 13 Dec 2011 00:11:44 GMT
- X-RateLimit-Remaining: 145
- X-Runtime: 0.01190
- X-Transaction-Mask: a6183ffa5f8ca943ff1b53b5644ef1145f6f285d
- Content-Type: application/json; charset=utf-8
- Content-Length: 2389
- Pragma: no-cache
- X-RateLimit-Class: api
- X-Revision: DEV
- Expires: Tue, 31 Mar 1981 05:00:00 GMT
- Cache-Control: no-cache, no-store, must-revalidate, pre-check=0, post-check=0
- X-MID: a55f21733bc52bb11d1fc58f9b51b4974fbb8f83
- X-RateLimit-Reset: 1323738416
- Set-Cookie: k=10.34.234.116.1323735104238974; path=/; expires=Tue, 20-Dec-11 00:11:44 GMT; domain=.twitter.com
- Set-Cookie: guest_id=v1%3A13237351042425496; domain=.twitter.com; path=/; expires=Thu, 12-Dec-2013 12:11:44 GMT
- Set-Cookie: _twitter_sess=BAh7CDoPY3JlYXRlZF9hdGwrCPS6wjQ0AToHaWQiJTFiMTlhY2E1ZjczYThk%250ANDUwMWQxNjMwZGU2YTQ1ODBhIgpmbGFzaElDOidBY3Rpb25Db250cm9sbGVy%250AOjpGbGFzaDo6Rmxhc2hIYXNoewAGOgpAdXNlZHsA--6b502f30a083e8a41a64f10930e142ea362b1561; domain=.twitter.com; path=/; HttpOnly
- Vary: Accept-Encoding
- Server: tfe
-
+ HTTP/1.1 200 OK
+ Date: Tue, 13 Dec 2011 00:11:44 GMT
+ Status: 200 OK
+ X-Transaction: 50b85fff78dab4a3
+ X-RateLimit-Limit: 150
+ ETag: "b31143be48ebfe7512b65fe64fe092f3"
+ X-Frame-Options: SAMEORIGIN
+ Last-Modified: Tue, 13 Dec 2011 00:11:44 GMT
+ X-RateLimit-Remaining: 145
+ X-Runtime: 0.01190
+ X-Transaction-Mask: a6183ffa5f8ca943ff1b53b5644ef1145f6f285d
+ Content-Type: application/json; charset=utf-8
+ Content-Length: 2389
+ Pragma: no-cache
+ X-RateLimit-Class: api
+ X-Revision: DEV
+ Expires: Tue, 31 Mar 1981 05:00:00 GMT
+ Cache-Control: no-cache, no-store, must-revalidate, pre-check=0, post-check=0
+ X-MID: a55f21733bc52bb11d1fc58f9b51b4974fbb8f83
+ X-RateLimit-Reset: 1323738416
+ Set-Cookie: k=10.34.234.116.1323735104238974; path=/; expires=Tue, 20-Dec-11 00:11:44 GMT; domain=.twitter.com
+ Set-Cookie: guest_id=v1%3A13237351042425496; domain=.twitter.com; path=/; expires=Thu, 12-Dec-2013 12:11:44 GMT
+ Set-Cookie: _twitter_sess=BAh7CDoPY3JlYXRlZF9hdGwrCPS6wjQ0AToHaWQiJTFiMTlhY2E1ZjczYThk%250ANDUwMWQxNjMwZGU2YTQ1ODBhIgpmbGFzaElDOidBY3Rpb25Db250cm9sbGVy%250AOjpGbGFzaDo6Rmxhc2hIYXNoewAGOgpAdXNlZHsA--6b502f30a083e8a41a64f10930e142ea362b1561; domain=.twitter.com; path=/; HttpOnly
+ Vary: Accept-Encoding
+ Server: tfe
+
  [{"contributors_enabled":false,"profile_background_tile":true,"followers_count":644,"protected":false,"profile_image_url":"http:\/\/a0.twimg.com\/profile_images\/69064242\/gb_normal.jpg","screen_name":"birkenfeld","default_profile_image":false,"following":null,"friends_count":88,"profile_sidebar_fill_color":"7AC3EE","url":"http:\/\/pythonic.pocoo.org\/","name":"Georg Brandl","default_profile":false,"is_translator":false,"utc_offset":3600,"profile_sidebar_border_color":"65B0DA","description":"","profile_background_image_url_https":"https:\/\/si0.twimg.com\/images\/themes\/theme10\/bg.gif","favourites_count":0,"profile_use_background_image":true,"created_at":"Tue Dec 30 22:25:11 +0000 2008","status":{"retweet_count":10,"favorited":false,"geo":null,"possibly_sensitive":false,"coordinates":null,"in_reply_to_screen_name":null,"in_reply_to_status_id_str":null,"retweeted":false,"in_reply_to_status_id":null,"in_reply_to_user_id_str":null,"created_at":"Sat Jul 09 13:42:35 +0000 2011","truncated":false,"id_str":"89690914515206144","contributors":null,"place":null,"source":"web","in_reply_to_user_id":null,"id":89690914515206144,"retweeted_status":{"retweet_count":10,"favorited":false,"geo":null,"possibly_sensitive":false,"coordinates":null,"in_reply_to_screen_name":null,"in_reply_to_status_id_str":null,"retweeted":false,"in_reply_to_status_id":null,"in_reply_to_user_id_str":null,"created_at":"Sat Jul 09 13:07:04 +0000 2011","truncated":false,"id_str":"89681976755372032","contributors":null,"place":null,"source":"web","in_reply_to_user_id":null,"id":89681976755372032,"text":"Excellent Python posts from @mitsuhiko - http:\/\/t.co\/k1wt6e4 and @ncoghlan_dev - http:\/\/t.co\/eTxacgZ (links fixed)"},"text":"RT @jessenoller: Excellent Python posts from @mitsuhiko - http:\/\/t.co\/k1wt6e4 and @ncoghlan_dev - http:\/\/t.co\/eTxacgZ (links fixed)"},"follow_request_sent":null,"statuses_count":553,"geo_enabled":false,"notifications":null,"profile_text_color":"3D1957","id_str":"18490730","lang":"en","profile_background_image_url":"http:\/\/a1.twimg.com\/images\/themes\/theme10\/bg.gif","profile_image_url_https":"https:\/\/si0.twimg.com\/profile_images\/69064242\/gb_normal.jpg","show_all_inline_media":true,"listed_count":65,"profile_link_color":"FF0000","verified":false,"id":18490730,"time_zone":"Berlin","profile_background_color":"642D8B","location":"Bavaria, Germany"}]
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: pygments.rb
  version: !ruby/object:Gem::Version
-   hash: 27
+   hash: 25
    prerelease:
    segments:
    - 0
    - 2
-   - 6
-   version: 0.2.6
+   - 7
+   version: 0.2.7
  platform: ruby
  authors:
  - Aman Gupta
@@ -15,13 +15,12 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2012-03-02 00:00:00 -08:00
+ date: 2012-03-05 00:00:00 +01:00
  default_executable:
  dependencies:
  - !ruby/object:Gem::Dependency
-   name: rubypython
    prerelease: false
-   requirement: &id001 !ruby/object:Gem::Requirement
+   version_requirements: &id001 !ruby/object:Gem::Requirement
      none: false
      requirements:
      - - ~>
@@ -32,12 +31,12 @@ dependencies:
          - 5
          - 3
          version: 0.5.3
+   name: rubypython
    type: :runtime
-   version_requirements: *id001
+   requirement: *id001
  - !ruby/object:Gem::Dependency
-   name: rake-compiler
    prerelease: false
-   requirement: &id002 !ruby/object:Gem::Requirement
+   version_requirements: &id002 !ruby/object:Gem::Requirement
      none: false
      requirements:
      - - "="
@@ -48,8 +47,9 @@ dependencies:
          - 7
          - 6
          version: 0.7.6
+   name: rake-compiler
    type: :development
-   version_requirements: *id002
+   requirement: *id002
  description: pygments.rb exposes the pygments syntax highlighter via embedded python
  email:
  - aman@tmm1.net
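
The dependency hunks above leave the actual requirements unchanged (rubypython ~> 0.5.3 at runtime, rake-compiler 0.7.6 for development); only the field ordering and which key carries the YAML anchor differ, most likely because the gem was repackaged with a different RubyGems version. Consuming the release is therefore the usual one-line Gemfile entry. A sketch only, assuming Bundler; the source line is illustrative:

    # Gemfile sketch (assumes Bundler; source URL shown is illustrative).
    source 'https://rubygems.org'
    gem 'pygments.rb', '0.2.7' # rubypython ~> 0.5.3 is pulled in as a runtime dependency
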
@@ -148,8 +148,6 @@ files:
  - vendor/pygments-main/pygments/lexers/math.py
  - vendor/pygments-main/pygments/lexers/other.py
  - vendor/pygments-main/pygments/lexers/parsers.py
- - vendor/pygments-main/pygments/lexers/postgres.py
- - vendor/pygments-main/pygments/lexers/pypylog.py
  - vendor/pygments-main/pygments/lexers/shell.py
  - vendor/pygments-main/pygments/lexers/special.py
  - vendor/pygments-main/pygments/lexers/sql.py
@@ -407,7 +405,7 @@ files:
  - vendor/pygments-main/tests/test_token.py
  - vendor/pygments-main/tests/test_using_api.py
  - vendor/pygments-main/tests/test_util.py
- has_rdoc: false
+ has_rdoc: true
  homepage: http://github.com/tmm1/pygments.rb
  licenses: []

@@ -1,326 +0,0 @@
- # -*- coding: utf-8 -*-
- """
-     pygments.lexers.postgres
-     ~~~~~~~~~~~~~~~~~~~~~~~~
-
-     Lexers for PostgreSQL-specific SQL and psql interactive session.
-
-     `PostgresLexer`
-         A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
-         lexer are:
-
-         - keywords and data types list parsed from the PG docs (run the
-           `_postgres_builtins` module to update them);
-         - Content of $-strings parsed using a specific lexer, e.g. the content
-           of a PL/Python function is parsed using the Python lexer;
-         - parse PG specific constructs: E-strings, $-strings, U&-strings,
-           different operators and punctuation.
-
-     `PlPgsqlLexer`
-         A lexer for the PL/pgSQL language. Adds a few specific construct on
-         top of the PG SQL lexer (such as <<label>>).
-
-     `PostgresConsoleLexer`
-         A lexer to highlight an interactive psql session:
-
-         - identifies the prompt and does its best to detect the end of command
-           in multiline statement where not all the lines are prefixed by a
-           prompt, telling them apart from the output;
-         - highlights errors in the output and notification levels;
-         - handles psql backslash commands.
-
-     The ``tests/examplefiles`` contains a few test files with data to be
-     parsed by these lexers.
-
-     :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
-     :license: BSD, see LICENSE for details.
- """
-
- import re
- from copy import deepcopy
-
- from pygments.lexer import Lexer, RegexLexer, do_insertions
- from pygments.token import Punctuation, \
-     Text, Comment, Operator, Keyword, Name, String, Number, Generic
- from pygments.lexers import get_lexer_by_name, ClassNotFound
-
- from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
-     PSEUDO_TYPES, PLPGSQL_KEYWORDS
-
-
- __all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer']
-
- line_re = re.compile('.*?\n')
-
- language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
-
- def language_callback(lexer, match):
-     """Parse the content of a $-string using a lexer
-
-     The lexer is chosen looking for a nearby LANGUAGE.
-
-     Note: this function should have been a `PostgresBase` method, but the
-     rules deepcopy fails in this case.
-     """
-     l = None
-     m = language_re.match(lexer.text[match.end():match.end()+100])
-     if m is not None:
-         l = lexer._get_lexer(m.group(1))
-     else:
-         m = list(language_re.finditer(
-             lexer.text[max(0, match.start()-100):match.start()]))
-         if m:
-             l = lexer._get_lexer(m[-1].group(1))
-
-     if l:
-         yield (match.start(1), String, match.group(1))
-         for x in l.get_tokens_unprocessed(match.group(2)):
-             yield x
-         yield (match.start(3), String, match.group(3))
-
-     else:
-         yield (match.start(), String, match.group())
-
-
- class PostgresBase(object):
-     """Base class for Postgres-related lexers.
-
-     This is implemented as a mixin to avoid the Lexer metaclass kicking in.
-     this way the different lexer don't have a common Lexer ancestor. If they
-     had, _tokens could be created on this ancestor and not updated for the
-     other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
-     seem to suggest that regexp lexers are not really subclassable.
-
-     `language_callback` should really be our method, but this breaks deepcopy.
-     """
-     def get_tokens_unprocessed(self, text, *args):
-         # Have a copy of the entire text to be used by `language_callback`.
-         self.text = text
-         for x in super(PostgresBase, self).get_tokens_unprocessed(
-                 text, *args):
-             yield x
-
-     def _get_lexer(self, lang):
-         if lang.lower() == 'sql':
-             return get_lexer_by_name('postgresql', **self.options)
-
-         tries = [ lang ]
-         if lang.startswith('pl'):
-             tries.append(lang[2:])
-         if lang.endswith('u'):
-             tries.append(lang[:-1])
-         if lang.startswith('pl') and lang.endswith('u'):
-             tries.append(lang[2:-1])
-
-         for l in tries:
-             try:
-                 return get_lexer_by_name(l, **self.options)
-             except ClassNotFound:
-                 pass
-         else:
-             # TODO: better logging
-             # print >>sys.stderr, "language not found:", lang
-             return None
-
-
- class PostgresLexer(PostgresBase, RegexLexer):
-     """
-     Lexer for the PostgreSQL dialect of SQL.
-
-     *New in Pygments 1.5.*
-     """
-
-     name = 'PostgreSQL SQL dialect'
-     aliases = ['postgresql', 'postgres']
-     mimetypes = ['text/x-postgresql']
-
-     flags = re.IGNORECASE
-     tokens = {
-         'root': [
-             (r'\s+', Text),
-             (r'--.*?\n', Comment.Single),
-             (r'/\*', Comment.Multiline, 'multiline-comments'),
-             (r'(' + '|'.join([s.replace(" ", "\s+")
-                               for s in DATATYPES + PSEUDO_TYPES])
-                   + r')\b', Name.Builtin),
-             (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
-             (r'[+*/<>=~!@#%^&|`?^-]+', Operator),
-             (r'::', Operator), # cast
-             (r'\$\d+', Name.Variable),
-             (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
-             (r'[0-9]+', Number.Integer),
-             (r"(E|U&)?'(''|[^'])*'", String.Single),
-             (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
-             (r'(?ms)(\$[^\$]*\$)(.*?)(\1)', language_callback),
-             (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-
-             # psql variable in SQL
-             (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
-
-             (r'[;:()\[\]\{\},\.]', Punctuation),
-         ],
-         'multiline-comments': [
-             (r'/\*', Comment.Multiline, 'multiline-comments'),
-             (r'\*/', Comment.Multiline, '#pop'),
-             (r'[^/\*]+', Comment.Multiline),
-             (r'[/*]', Comment.Multiline)
-         ],
-     }
-
-
- class PlPgsqlLexer(PostgresBase, RegexLexer):
-     """
-     Handle the extra syntax in Pl/pgSQL language.
-
-     *New in Pygments 1.5.*
-     """
-     name = 'PL/pgSQL'
-     aliases = ['plpgsql']
-     mimetypes = ['text/x-plpgsql']
-
-     flags = re.IGNORECASE
-     tokens = deepcopy(PostgresLexer.tokens)
-
-     # extend the keywords list
-     for i, pattern in enumerate(tokens['root']):
-         if pattern[1] == Keyword:
-             tokens['root'][i] = (
-                 r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
-                 Keyword)
-             del i
-             break
-     else:
-         assert 0, "SQL keywords not found"
-
-     # Add specific PL/pgSQL rules (before the SQL ones)
-     tokens['root'][:0] = [
-         (r'\%[a-z][a-z0-9_]*\b', Name.Builtin), # actually, a datatype
-         (r':=', Operator),
-         (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
-         (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo), # #variable_conflict
-     ]
-
-
- class PsqlRegexLexer(PostgresBase, RegexLexer):
-     """
-     Extend the PostgresLexer adding support specific for psql commands.
-
-     This is not a complete psql lexer yet as it lacks prompt support
-     and output rendering.
-     """
-
-     name = 'PostgreSQL console - regexp based lexer'
-     aliases = [] # not public
-
-     flags = re.IGNORECASE
-     tokens = deepcopy(PostgresLexer.tokens)
-
-     tokens['root'].append(
-         (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
-     tokens['psql-command'] = [
-         (r'\n', Text, 'root'),
-         (r'\s+', Text),
-         (r'\\[^\s]+', Keyword.Pseudo),
-         (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
-         (r"'(''|[^'])*'", String.Single),
-         (r"`([^`])*`", String.Backtick),
-         (r"[^\s]+", String.Symbol),
-     ]
-
- re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
- re_psql_command = re.compile(r'\s*\\')
- re_end_command = re.compile(r';\s*(--.*?)?$')
- re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
- re_error = re.compile(r'(ERROR|FATAL):')
- re_message = re.compile(
-     r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
-     r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
-
- def lookahead(x):
-     """Wrap an iterator and allow pushing back an item."""
-     for i in x:
-         while 1:
-             i = yield i
-             if i is None:
-                 break
-             yield i
-
-
- class PostgresConsoleLexer(Lexer):
-     """
-     Lexer for psql sessions.
-
-     *New in Pygments 1.5.*
-     """
-
-     name = 'PostgreSQL console (psql)'
-     aliases = ['psql', 'postgresql-console', 'postgres-console']
-     mimetypes = ['text/x-postgresql-psql']
-
-     def get_tokens_unprocessed(self, data):
-         sql = PsqlRegexLexer(**self.options)
-
-         lines = lookahead(line_re.findall(data))
-
-         # prompt-output cycle
-         while 1:
-
-             # consume the lines of the command: start with an optional prompt
-             # and continue until the end of command is detected
-             curcode = ''
-             insertions = []
-             while 1:
-                 try:
-                     line = lines.next()
-                 except StopIteration:
-                     # allow the emission of partially collected items
-                     # the repl loop will be broken below
-                     break
-
-                 # Identify a shell prompt in case of psql commandline example
-                 if line.startswith('$') and not curcode:
-                     lexer = get_lexer_by_name('console', **self.options)
-                     for x in lexer.get_tokens_unprocessed(line):
-                         yield x
-                     break
-
-                 # Identify a psql prompt
-                 mprompt = re_prompt.match(line)
-                 if mprompt is not None:
-                     insertions.append((len(curcode),
-                                        [(0, Generic.Prompt, mprompt.group())]))
-                     curcode += line[len(mprompt.group()):]
-                 else:
-                     curcode += line
-
-                 # Check if this is the end of the command
-                 # TODO: better handle multiline comments at the end with
-                 # a lexer with an external state?
-                 if re_psql_command.match(curcode) \
-                 or re_end_command.search(curcode):
-                     break
-
-             # Emit the combined stream of command and prompt(s)
-             for item in do_insertions(insertions,
-                     sql.get_tokens_unprocessed(curcode)):
-                 yield item
-
-             # Emit the output lines
-             out_token = Generic.Output
-             while 1:
-                 line = lines.next()
-                 mprompt = re_prompt.match(line)
-                 if mprompt is not None:
-                     # push the line back to have it processed by the prompt
-                     lines.send(line)
-                     break
-
-                 mmsg = re_message.match(line)
-                 if mmsg is not None:
-                     if mmsg.group(1).startswith("ERROR") \
-                     or mmsg.group(1).startswith("FATAL"):
-                         out_token = Generic.Error
-                     yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
-                     yield (mmsg.start(2), out_token, mmsg.group(2))
-                 else:
-                     yield (0, out_token, line)
@@ -1,85 +0,0 @@
- # -*- coding: utf-8 -*-
- """
-     pygments.lexers.pypylog
-     ~~~~~~~~~~~~~~~~~~~~~~~
-
-     Lexer for pypy log files.
-
-     :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
-     :license: BSD, see LICENSE for details.
- """
-
- from pygments.lexer import RegexLexer, bygroups, include
- from pygments.token import Text, Keyword, Number, Comment, Punctuation, Name, \
-     String
-
-
- __all__ = ["PyPyLogLexer"]
-
-
- class PyPyLogLexer(RegexLexer):
-     """
-     Lexer for PyPy log files.
-
-     *New in Pygments 1.5.*
-     """
-     name = "PyPy Log"
-     aliases = ["pypylog", "pypy"]
-     filenames = ["*.pypylog"]
-     mimetypes = ['application/x-pypylog']
-
-     tokens = {
-         "root": [
-             (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
-             (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
-             include("extra-stuff"),
-         ],
-         "jit-log": [
-             (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
-
-             (r"[ifp]\d+", Name),
-             (r"ptr\d+", Name),
-             (r"(\()([\w_]+(?:\.[\w_]+)?)(\))",
-              bygroups(Punctuation, Name.Builtin, Punctuation)),
-             (r"[\[\]=,()]", Punctuation),
-             (r"(\d+\.\d+|inf|-inf)", Number.Float),
-             (r"-?\d+", Number.Integer),
-             (r"'.*'", String),
-             (r"(None|descr|ConstClass|ConstPtr)", Name),
-             (r"<.*?>", Name.Builtin),
-             (r"(debug_merge_point|jump|finish)", Name.Class),
-             (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
-              r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
-              r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
-              r"int_is_true|"
-              r"uint_floordiv|uint_ge|uint_lt|"
-              r"float_add|float_sub|float_mul|float_truediv|"
-              r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
-              r"ptr_eq|"
-              r"cast_int_to_float|cast_float_to_int|cast_opaque_ptr|"
-              r"force_token|quasiimmut_field|same_as|virtual_ref_finish|virtual_ref|"
-              r"call_may_force|call_assembler|call_loopinvariant|call_release_gil|call_pure|call|"
-              r"new_with_vtable|new_array|newstr|newunicode|new|"
-              r"arraylen_gc|"
-              r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
-              r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|getfield_gc|"
-              r"getfield_raw|setfield_gc|setfield_raw|"
-              r"strgetitem|strsetitem|strlen|copystrcontent|"
-              r"unicodegetitem|unicodesetitem|unicodelen|"
-              r"guard_true|guard_false|guard_value|guard_isnull|"
-              r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
-              r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
-              Name.Builtin),
-             include("extra-stuff"),
-         ],
-         "jit-backend-counts": [
-             (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
-             (r"[:]", Punctuation),
-             (r"\d+", Number),
-             include("extra-stuff"),
-         ],
-         "extra-stuff": [
-             (r"[\n\s]+", Text),
-             (r"#.*?$", Comment),
-         ],
-     }
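
The two modules removed above are the PostgreSQL/psql lexers and the PyPy log lexer that upstream Pygments added in 1.5; this release simply drops them from the gem's vendored copy of pygments-main, matching the files: change in the metadata diff. Lexers that remain bundled are still reached through the gem's top-level API. A hedged sketch, assuming the Pygments.highlight call documented in the pygments.rb README and the still-vendored generic SQL lexer:

    # Sketch only: assumes the documented pygments.rb highlight API.
    # 'sql' is the generic SQL lexer (vendor/pygments-main/pygments/lexers/sql.py),
    # which is still shipped; the PostgreSQL-specific lexer removed above is not.
    require 'pygments'
    puts Pygments.highlight('SELECT 1;', :lexer => 'sql')
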