pygments.rb 0.2.8 → 0.2.9
- data/Rakefile +10 -1
- data/lib/pygments/version.rb +1 -1
- data/vendor/custom_lexers/github.py +330 -0
- data/vendor/pygments-main/AUTHORS +1 -0
- data/vendor/pygments-main/CHANGES +11 -0
- data/vendor/pygments-main/REVISION +1 -1
- data/vendor/pygments-main/pygments/lexers/_mapping.py +6 -0
- data/vendor/pygments-main/pygments/lexers/functional.py +6 -8
- data/vendor/pygments-main/pygments/lexers/github.py +330 -0
- data/vendor/pygments-main/pygments/lexers/math.py +158 -3
- data/vendor/pygments-main/pygments/lexers/text.py +1 -1
- data/vendor/pygments-main/pygments/lexers/web.py +11 -8
- data/vendor/pygments-main/tests/examplefiles/AcidStateAdvanced.hs +209 -0
- data/vendor/pygments-main/tests/examplefiles/string.jl +1031 -0
- data/vendor/pygments-main/tests/examplefiles/test.css +5 -0
- data/vendor/pygments-main/tests/examplefiles/test.xqy +3 -1
- data/vendor/pygments-main/tests/test_basic_api.py +23 -11
- data/vendor/pygments-main/tests/test_cmdline.py +25 -21
- data/vendor/pygments-main/tests/test_examplefiles.py +5 -1
- data/vendor/pygments-main/tests/test_html_formatter.py +24 -16
- data/vendor/pygments-main/tests/test_latex_formatter.py +13 -5
- data/vendor/pygments-main/tests/test_regexlexer.py +1 -1
- data/vendor/pygments-main/tests/test_token.py +7 -10
- data/vendor/pygments-main/tests/test_using_api.py +1 -1
- data/vendor/pygments-main/tests/test_util.py +26 -27
- metadata +9 -7
data/vendor/pygments-main/tests/examplefiles/test.xqy
@@ -25,13 +25,15 @@ define function whatsit($param as xs:string) as xs:string {
 element test { 'a' },
 attribute foo { "bar" },
 fn:doc()[ foo/@bar eq $let ],
-
+//x/with/another/*/*:version/xpath/@attr }
 };

 let $bride := "Bride"
 let $test := validate lax { <some>html</some> }
 let $test := validate strict { <some>html</some> }
 let $test := validate { <some>html</some> }
+let $test := $var1/*:Article (: comment here :) [fn:not()]
+let $test := $var1/@*:name/fn:string()

 let $noop := ordered { $test }
 let $noop := unordered { $test }

data/vendor/pygments-main/tests/test_basic_api.py
@@ -93,7 +93,7 @@ def test_lexer_options():
             'PythonConsoleLexer', 'RConsoleLexer', 'RubyConsoleLexer',
             'SqliteConsoleLexer', 'MatlabSessionLexer', 'ErlangShellLexer',
             'BashSessionLexer', 'LiterateHaskellLexer', 'PostgresConsoleLexer',
-            'ElixirConsoleLexer'):
+            'ElixirConsoleLexer', 'JuliaConsoleLexer'):
             inst = cls(ensurenl=False)
             ensure(inst.get_tokens('a\nb'), 'a\nb')
             inst = cls(ensurenl=False, stripall=True)
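
Note: ensurenl is a standard lexer option that appends a trailing newline to the
input before tokenizing. This test disables it and skips the console/session
lexers listed above (presumably because they do not round-trip plain text the
same way), so the new JuliaConsoleLexer simply joins that skip list. A small
standalone usage sketch, not part of the diff:

    from pygments.lexers import PythonLexer

    # with ensurenl=False no extra '\n' is appended to the input before lexing
    tokens = list(PythonLexer(ensurenl=False).get_tokens('a\nb'))
    print(''.join(t[1] for t in tokens))   # concatenated token text is 'a\nb'
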
@@ -238,13 +238,17 @@ class FiltersTest(unittest.TestCase):
         for x in filters.FILTERS.keys():
             lx = lexers.PythonLexer()
             lx.add_filter(x, **filter_args.get(x, {}))
-
+            fp = open(TESTFILE, 'rb')
+            try:
+                text = fp.read().decode('utf-8')
+            finally:
+                fp.close()
             tokens = list(lx.get_tokens(text))
             roundtext = ''.join([t[1] for t in tokens])
             if x not in ('whitespace', 'keywordcase'):
                 # these filters change the text
-                self.
-
+                self.assertEqual(roundtext, text,
+                                 "lexer roundtrip with %s filter failed" % x)

     def test_raiseonerror(self):
         lx = lexers.PythonLexer()
@@ -254,24 +258,32 @@ class FiltersTest(unittest.TestCase):
     def test_whitespace(self):
         lx = lexers.PythonLexer()
         lx.add_filter('whitespace', spaces='%')
-
+        fp = open(TESTFILE, 'rb')
+        try:
+            text = fp.read().decode('utf-8')
+        finally:
+            fp.close()
         lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
-        self.
+        self.assertFalse(' ' in lxtext)

     def test_keywordcase(self):
         lx = lexers.PythonLexer()
         lx.add_filter('keywordcase', case='capitalize')
-
+        fp = open(TESTFILE, 'rb')
+        try:
+            text = fp.read().decode('utf-8')
+        finally:
+            fp.close()
         lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
-        self.
+        self.assertTrue('Def' in lxtext and 'Class' in lxtext)

     def test_codetag(self):
         lx = lexers.PythonLexer()
         lx.add_filter('codetagify')
         text = u'# BUG: text'
         tokens = list(lx.get_tokens(text))
-        self.
-        self.
+        self.assertEqual('# ', tokens[0][1])
+        self.assertEqual('BUG', tokens[1][1])

     def test_codetag_boundary(self):
         # ticket #368
@@ -279,4 +291,4 @@ class FiltersTest(unittest.TestCase):
         lx.add_filter('codetagify')
         text = u'# DEBUG: text'
         tokens = list(lx.get_tokens(text))
-        self.
+        self.assertEqual('# DEBUG: text', tokens[0][1])
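
The recurring change in the FiltersTest hunks above replaces a one-line file read
with an explicit open()/try/finally block, presumably so the test file handle is
always closed even if reading or decoding fails. A minimal sketch of the pattern
in isolation (the read_text helper name is illustrative, not part of the test suite):

    def read_text(path, encoding='utf-8'):
        # open in binary mode and decode explicitly, as the tests above do
        fp = open(path, 'rb')
        try:
            return fp.read().decode(encoding)
        finally:
            # runs even if read() or decode() raises, so the handle never leaks
            fp.close()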

data/vendor/pygments-main/tests/test_cmdline.py
@@ -38,64 +38,68 @@ class CmdLineTest(unittest.TestCase):

     def test_L_opt(self):
         c, o, e = run_cmdline("-L")
-        self.
-        self.
-
+        self.assertEqual(c, 0)
+        self.assertTrue("Lexers" in o and "Formatters" in o and
+                        "Filters" in o and "Styles" in o)
         c, o, e = run_cmdline("-L", "lexer")
-        self.
-        self.
+        self.assertEqual(c, 0)
+        self.assertTrue("Lexers" in o and "Formatters" not in o)
         c, o, e = run_cmdline("-L", "lexers")
-        self.
+        self.assertEqual(c, 0)

     def test_O_opt(self):
         filename = TESTFILE
         c, o, e = run_cmdline("-Ofull=1,linenos=true,foo=bar",
                               "-fhtml", filename)
-        self.
-        self.
-        self.
+        self.assertEqual(c, 0)
+        self.assertTrue("<html" in o)
+        self.assertTrue('class="linenos"' in o)

     def test_P_opt(self):
         filename = TESTFILE
         c, o, e = run_cmdline("-Pfull", "-Ptitle=foo, bar=baz=,",
                               "-fhtml", filename)
-        self.
-        self.
+        self.assertEqual(c, 0)
+        self.assertTrue("<title>foo, bar=baz=,</title>" in o)

     def test_F_opt(self):
         filename = TESTFILE
         c, o, e = run_cmdline("-Fhighlight:tokentype=Name.Blubb,"
                               "names=TESTFILE filename",
                               "-fhtml", filename)
-        self.
-        self.
+        self.assertEqual(c, 0)
+        self.assertTrue('<span class="n-Blubb' in o)

     def test_H_opt(self):
         c, o, e = run_cmdline("-H", "formatter", "html")
-        self.
-        self.
+        self.assertEqual(c, 0)
+        self.assertTrue('HTML' in o)

     def test_S_opt(self):
         c, o, e = run_cmdline("-S", "default", "-f", "html", "-O", "linenos=1")
-        self.
+        self.assertEqual(c, 0)

     def test_invalid_opts(self):
         for opts in [("-L", "-lpy"), ("-L", "-fhtml"), ("-L", "-Ox"),
                      ("-a",), ("-Sst", "-lpy"), ("-H",),
                      ("-H", "formatter"),]:
-            self.
+            self.assertTrue(run_cmdline(*opts)[0] == 2)

     def test_normal(self):
         # test that cmdline gives the same output as library api
         from pygments.lexers import PythonLexer
         from pygments.formatters import HtmlFormatter
         filename = TESTFILE
-
+        fp = open(filename, 'rb')
+        try:
+            code = fp.read()
+        finally:
+            fp.close()

         output = highlight(code, PythonLexer(), HtmlFormatter())

         c, o, e = run_cmdline("-lpython", "-fhtml", filename)

-        self.
-        self.
-        self.
+        self.assertEqual(o, output)
+        self.assertEqual(e, "")
+        self.assertEqual(c, 0)
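
These tests call a run_cmdline helper defined earlier in test_cmdline.py (outside
this hunk); from its use it evidently returns the exit code plus captured stdout
and stderr. A rough sketch of how such a helper can be built on
pygments.cmdline.main, for orientation only; the real helper may differ:

    import sys
    from pygments.cmdline import main as cmdline_main

    try:
        from StringIO import StringIO   # Python 2, the era of this diff
    except ImportError:
        from io import StringIO         # Python 3

    def run_cmdline(*args):
        # capture stdout/stderr around pygments.cmdline.main()
        old_out, old_err = sys.stdout, sys.stderr
        sys.stdout, sys.stderr = StringIO(), StringIO()
        try:
            code = cmdline_main(['pygmentize'] + list(args))
        finally:
            out, err = sys.stdout.getvalue(), sys.stderr.getvalue()
            sys.stdout, sys.stderr = old_out, old_err
        return code, out, err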

data/vendor/pygments-main/tests/test_examplefiles.py
@@ -49,7 +49,11 @@ def test_example_files():
             yield check_lexer, lx, absfn, outfn

 def check_lexer(lx, absfn, outfn):
-
+    fp = open(absfn, 'rb')
+    try:
+        text = fp.read()
+    finally:
+        fp.close()
     text = text.replace(b('\r\n'), b('\n'))
     text = text.strip(b('\n')) + b('\n')
     try:

data/vendor/pygments-main/tests/test_html_formatter.py
@@ -23,8 +23,11 @@ import support

 TESTFILE, TESTDIR = support.location(__file__)

-
-
+fp = uni_open(TESTFILE, encoding='utf-8')
+try:
+    tokensource = list(PythonLexer().get_tokens(fp.read()))
+finally:
+    fp.close()


 class HtmlFormatterTest(unittest.TestCase):
@@ -39,7 +42,7 @@ class HtmlFormatterTest(unittest.TestCase):

         stripped_html = re.sub('<.*?>', '', houtfile.getvalue())
         escaped_text = escape_html(noutfile.getvalue())
-        self.
+        self.assertEqual(stripped_html, escaped_text)

     def test_external_css(self):
         # test correct behavior
@@ -52,13 +55,13 @@ class HtmlFormatterTest(unittest.TestCase):
         fmt1.format(tokensource, tfile)
         try:
             fmt2.format(tokensource, tfile)
-            self.
+            self.assertTrue(isfile(join(TESTDIR, 'fmt2.css')))
         except IOError:
             # test directory not writable
             pass
         tfile.close()

-        self.
+        self.assertTrue(isfile(join(dirname(tfile.name), 'fmt1.css')))
         os.unlink(join(dirname(tfile.name), 'fmt1.css'))
         try:
             os.unlink(join(TESTDIR, 'fmt2.css'))
@@ -81,7 +84,7 @@ class HtmlFormatterTest(unittest.TestCase):
         fmt = HtmlFormatter(**optdict)
         fmt.format(tokensource, outfile)
         html = outfile.getvalue()
-        self.
+        self.assertTrue(re.search("<pre>\s+1\s+2\s+3", html))

     def test_linenos_with_startnum(self):
         optdict = dict(linenos=True, linenostart=5)
@@ -89,7 +92,7 @@ class HtmlFormatterTest(unittest.TestCase):
         fmt = HtmlFormatter(**optdict)
         fmt.format(tokensource, outfile)
         html = outfile.getvalue()
-        self.
+        self.assertTrue(re.search("<pre>\s+5\s+6\s+7", html))

     def test_lineanchors(self):
         optdict = dict(lineanchors="foo")
@@ -97,7 +100,7 @@ class HtmlFormatterTest(unittest.TestCase):
         fmt = HtmlFormatter(**optdict)
         fmt.format(tokensource, outfile)
         html = outfile.getvalue()
-        self.
+        self.assertTrue(re.search("<pre><a name=\"foo-1\">", html))

     def test_lineanchors_with_startnum(self):
         optdict = dict(lineanchors="foo", linenostart=5)
@@ -105,7 +108,7 @@ class HtmlFormatterTest(unittest.TestCase):
         fmt = HtmlFormatter(**optdict)
         fmt.format(tokensource, outfile)
         html = outfile.getvalue()
-        self.
+        self.assertTrue(re.search("<pre><a name=\"foo-5\">", html))

     def test_valid_output(self):
         # test all available wrappers
@@ -119,29 +122,34 @@ class HtmlFormatterTest(unittest.TestCase):
         catname = os.path.join(TESTDIR, 'dtds', 'HTML4.soc')
         try:
             import subprocess
-
-
+            po = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
+                                  stdout=subprocess.PIPE)
+            ret = po.wait()
+            output = po.stdout.read()
+            po.stdout.close()
         except OSError:
             # nsgmls not available
             pass
         else:
-
+            if ret:
+                print output
+            self.assertFalse(ret, 'nsgmls run reported errors')

         os.unlink(pathname)

     def test_get_style_defs(self):
         fmt = HtmlFormatter()
         sd = fmt.get_style_defs()
-        self.
+        self.assertTrue(sd.startswith('.'))

         fmt = HtmlFormatter(cssclass='foo')
         sd = fmt.get_style_defs()
-        self.
+        self.assertTrue(sd.startswith('.foo'))
         sd = fmt.get_style_defs('.bar')
-        self.
+        self.assertTrue(sd.startswith('.bar'))
         sd = fmt.get_style_defs(['.bar', '.baz'])
         fl = sd.splitlines()[0]
-        self.
+        self.assertTrue('.bar' in fl and '.baz' in fl)

     def test_unicode_options(self):
         fmt = HtmlFormatter(title=u'Föö',
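
The hunk above (and the LaTeX formatter test in the next file) switches the
external tool invocation to subprocess.Popen, calling wait() before reading
stdout. As an observation, not part of the upstream change: with stdout=PIPE that
ordering can stall if the child writes more output than the pipe buffer holds;
communicate() reads and reaps in one step. A small sketch:

    import subprocess

    def run_and_capture(cmd):
        # communicate() drains the pipes while the process runs, then waits,
        # so large output cannot fill the pipe buffer and block the child
        po = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output, errors = po.communicate()
        return po.returncode, output, errors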

data/vendor/pygments-main/tests/test_latex_formatter.py
@@ -22,7 +22,11 @@ TESTFILE, TESTDIR = support.location(__file__)
 class LatexFormatterTest(unittest.TestCase):

     def test_valid_output(self):
-
+        fp = open(TESTFILE)
+        try:
+            tokensource = list(PythonLexer().get_tokens(fp.read()))
+        finally:
+            fp.close()
         fmt = LatexFormatter(full=True, encoding='latin1')

         handle, pathname = tempfile.mkstemp('.tex')
@@ -34,14 +38,18 @@ class LatexFormatterTest(unittest.TestCase):
         tfile.close()
         try:
             import subprocess
-
-
-
+            po = subprocess.Popen(['latex', '-interaction=nonstopmode',
+                                   pathname], stdout=subprocess.PIPE)
+            ret = po.wait()
+            output = po.stdout.read()
+            po.stdout.close()
         except OSError:
             # latex not available
             pass
         else:
-
+            if ret:
+                print output
+            self.assertFalse(ret, 'latex run reported errors')

         os.unlink(pathname)
         os.chdir(old_wd)

data/vendor/pygments-main/tests/test_regexlexer.py
@@ -34,6 +34,6 @@ class TupleTransTest(unittest.TestCase):
     def test(self):
         lx = TestLexer()
         toks = list(lx.get_tokens_unprocessed('abcde'))
-        self.
+        self.assertEqual(toks,
            [(0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
             (3, Text.Beer, 'd'), (4, Text.Root, 'e')])

data/vendor/pygments-main/tests/test_token.py
@@ -8,8 +8,6 @@
 """

 import unittest
-import StringIO
-import sys

 from pygments import token

@@ -17,8 +15,7 @@ from pygments import token
 class TokenTest(unittest.TestCase):

     def test_tokentype(self):
-        e = self.
-        r = self.assertRaises
+        e = self.assertEqual

         t = token.String

@@ -27,13 +24,13 @@ class TokenTest(unittest.TestCase):
         e(t.__class__, token._TokenType)

     def test_functions(self):
-        self.
-        self.
-        self.
+        self.assertTrue(token.is_token_subtype(token.String, token.String))
+        self.assertTrue(token.is_token_subtype(token.String, token.Literal))
+        self.assertFalse(token.is_token_subtype(token.Literal, token.String))

-        self.
-        self.
-        self.
+        self.assertTrue(token.string_to_tokentype(token.String) is token.String)
+        self.assertTrue(token.string_to_tokentype('') is token.Token)
+        self.assertTrue(token.string_to_tokentype('String') is token.String)

     def test_sanity_check(self):
         stp = token.STANDARD_TYPES.copy()

data/vendor/pygments-main/tests/test_using_api.py
@@ -32,7 +32,7 @@ class UsingStateTest(unittest.TestCase):
         expected = [(Text, 'a'), (String, '"'), (Keyword, 'bcd'),
                     (String, '"'), (Text, 'e\n')]
         t = list(TestLexer().get_tokens('a"bcd"e'))
-        self.
+        self.assertEqual(t, expected)

     def test_error(self):
         def gen():

data/vendor/pygments-main/tests/test_util.py
@@ -8,7 +8,6 @@
 """

 import unittest
-import os

 from pygments import util

@@ -23,7 +22,7 @@ class UtilTest(unittest.TestCase):

     def test_getoptions(self):
         raises = self.assertRaises
-        equals = self.
+        equals = self.assertEqual

         equals(util.get_bool_opt({}, 'a', True), True)
         equals(util.get_bool_opt({}, 'a', 1), True)
@@ -56,20 +55,20 @@ class UtilTest(unittest.TestCase):
             other text
             """

-        self.
-        self.
+        self.assertEqual(util.docstring_headline(f1), "docstring headline")
+        self.assertEqual(util.docstring_headline(f2), "docstring headline")

     def test_analysator_returns_float(self):
         # If an analysator wrapped by make_analysator returns a floating point
         # number, then that number will be returned by the wrapper.
-        self.
+        self.assertEqual(FakeLexer.analyse('0.5'), 0.5)

     def test_analysator_returns_boolean(self):
         # If an analysator wrapped by make_analysator returns a boolean value,
         # then the wrapper will return 1.0 if the boolean was True or 0.0 if
         # it was False.
-        self.
-        self.
+        self.assertEqual(FakeLexer.analyse(True), 1.0)
+        self.assertEqual(FakeLexer.analyse(False), 0.0)

     def test_analysator_raises_exception(self):
         # If an analysator wrapped by make_analysator raises an exception,
@@ -78,40 +77,40 @@ class UtilTest(unittest.TestCase):
             def analyse(text):
                 raise RuntimeError('something bad happened')
             analyse = util.make_analysator(analyse)
-        self.
+        self.assertEqual(ErrorLexer.analyse(''), 0.0)

     def test_analysator_value_error(self):
         # When converting the analysator's return value to a float a
         # ValueError may occur. If that happens 0.0 is returned instead.
-        self.
+        self.assertEqual(FakeLexer.analyse('bad input'), 0.0)

     def test_analysator_type_error(self):
         # When converting the analysator's return value to a float a
         # TypeError may occur. If that happens 0.0 is returned instead.
-        self.
+        self.assertEqual(FakeLexer.analyse(None), 0.0)

     def test_shebang_matches(self):
-        self.
-        self.
-        self.
-
-        self.
-
+        self.assertTrue(util.shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?'))
+        self.assertTrue(util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?'))
+        self.assertTrue(util.shebang_matches('#!/usr/bin/startsomethingwith python',
+                                             r'python(2\.\d)?'))
+        self.assertTrue(util.shebang_matches('#!C:\\Python2.4\\Python.exe',
+                                             r'python(2\.\d)?'))

-        self.
-        self.
-        self.
+        self.assertFalse(util.shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?'))
+        self.assertFalse(util.shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?'))
+        self.assertFalse(util.shebang_matches('#!', r'python'))

     def test_doctype_matches(self):
-        self.
-
-        self.
-
-        self.
+        self.assertTrue(util.doctype_matches(
+            '<!DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
+        self.assertFalse(util.doctype_matches(
+            '<?xml ?> <DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
+        self.assertTrue(util.html_doctype_matches(
            '<?xml ?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN">'))

     def test_xml(self):
-        self.
+        self.assertTrue(util.looks_like_xml(
            '<?xml ?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN">'))
-        self.
-        self.
+        self.assertTrue(util.looks_like_xml('<html xmlns>abc</html>'))
+        self.assertFalse(util.looks_like_xml('<html>'))
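
For readers unfamiliar with the pygments.util helpers exercised above, a short
standalone usage sketch; the expected results follow directly from the assertions
in the diff:

    from pygments.util import shebang_matches, looks_like_xml

    # shebang_matches() inspects only the first line of the text
    print(shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?'))   # True
    print(shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?'))  # False

    # looks_like_xml() is a cheap heuristic used when guessing lexers for markup
    print(looks_like_xml('<html xmlns>abc</html>'))  # True
    print(looks_like_xml('<html>'))                  # False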