tdp4r 1.3.3

This diff represents the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package exactly as it appears in its public registry.
data/test/test_tdp.rb ADDED
@@ -0,0 +1,472 @@
+ require 'test/unit'
+ require 'tdp'
+ require 'tdputils'
+
+ class Tokens
+   include Enumerable
+
+   def initialize(str)
+     @str = str
+   end
+
+   def each()
+     @str.each_byte{|c| yield(c.chr) }
+   end
+ end
+
+ class Calculator
+   include TDParser
+   include TDPUtils
+
+   def expr1
+     rule(:expr2) - ((token("+")|token("-")) - rule(:expr2))*0 >> proc{|x|
+       x[1].inject(x[0]){|n,y|
+         case y[0]
+         when "+"
+           n + y[1]
+         when "-"
+           n - y[1]
+         end
+       }
+     }
+   end
+
+   def expr2
+     rule(:prim) - ((token("*")|token("/")) - rule(:prim))*0 >> proc{|x|
+       x[1].inject(x[0]){|n,y|
+         case y[0]
+         when "*"
+           n * y[1]
+         when "/"
+           n / y[1]
+         end
+       }
+     }
+   end
+
+   def prim
+     token(:int) >> proc{|x| x[0].value.to_i } |
+     token("(") - rule(:expr1) - token(")") >> proc{|x| x[1] }
+   end
+
+   def parse(str)
+     tokenizer = StringTokenizer.new({
+       /\d+/ => :int,
+     })
+     expr1.parse(tokenizer.generate(str))
+   end
+ end
+
+ Calculator2 = TDParser.define{|g|
+   g.plus = "+"
+   g.minus = "-"
+   g.mult = "*"
+   g.div = "/"
+
+   g.expr1 =
+     chainl(prim, mult|div, plus|minus){|x|
+       case x[1]
+       when "+"
+         x[0] + x[2]
+       when "-"
+         x[0] - x[2]
+       when "*"
+         x[0] * x[2]
+       when "/"
+         x[0] / x[2]
+       end
+     }
+
+   g.prim =
+     g.token(:int) >> proc{|x| x[0].value.to_i } |
+     g.token("(") - g.expr1 - g.token(")") >> proc{|x| x[1] }
+
+   def parse(str)
+     tokenizer = TDPUtils::StringTokenizer.new({
+       /\d+/ => :int,
+     })
+     expr1.parse(tokenizer.generate(str))
+   end
+ }
+
+ LeftRecursiveCalculator = TDParser.define{|g|
+   g.plus = "+"
+   g.minus = "-"
+   g.mult = "*"
+   g.div = "/"
+
+   g.expr1 =
+     (g.plus|g.minus) - g.expr1 >> proc{|x|
+       n = x[0]
+       x[1].each{|y|
+         case y[0]
+         when "+"
+           n += y[1]
+         when "-"
+           n -= y[1]
+         end
+       }
+       n
+     }
+   g.expr1 |= g.expr2
+
+   g.expr2 =
+     (g.mult|g.div) - g.expr2 >> proc{|x|
+       n = x[0]
+       x[1].each{|y|
+         case y[0]
+         when "*"
+           n *= y[1]
+         when "/"
+           n = n / y[1]
+         end
+       }
+       n
+     }
+   g.expr2 |= g.prim
+
+   g.prim =
+     g.token(:int) >> proc{|x| x[0].value.to_i } |
+     g.token("(") - g.expr1 - g.token(")") >> proc{|x| x[1] }
+
+   def parse(str)
+     tokenizer = TDPUtils::StringTokenizer.new({
+       /\d+/ => :int,
+     })
+     expr1.parse(tokenizer.generate(str))
+   end
+ }
+
+
+ class TestTDParser < Test::Unit::TestCase
+   include TDParser
+   include TDPUtils
+
+   def setup()
+     @calc = Calculator.new()
+   end
+
+   def test_sequence1()
+     abc = "abc"
+     rule = token("a") - token("b") - token("c") >> proc{|arg| arg.join() }
+     assert_equal(abc, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence2()
+     abc = "aBc"
+     rule = token("a") - token("b") - token("c") >> proc{|arg| arg.join() }
+     assert_equal(nil, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence3()
+     abc = "abC"
+     rule = token("a") - token("b") - token("c") >> proc{|arg| arg.join() }
+     assert_equal(nil, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence4()
+     abc = "ab"
+     rule = token("a") - token("b") - token("c") >> proc{|arg| arg.join() }
+     assert_equal(nil, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence5()
+     abc = "abc"
+     rule = any() - any() - any() >> proc{|arg| arg.join() }
+     assert_equal(abc, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence6()
+     abc = "abc"
+     rule = any() - any() - any() - empty() >> proc{|arg| arg }
+     assert_equal(["a","b","c",nil], rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence7()
+     abc = "abc"
+     rule = any() - any() - (empty()|any()) >> proc{|arg| arg }
+     assert_equal(["a","b",nil], rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence8()
+     abc = "abc"
+     rule = any() - any() - (any()|empty()) >> proc{|arg| arg }
+     assert_equal(["a","b","c"], rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence9()
+     abc = "abc"
+     rule = any() - any() - any() - (any()|empty()) >> proc{|arg| arg }
+     assert_equal(["a","b","c",nil], rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence10()
+     abc = "ab"
+     rule = any() - any() - none() >> proc{|arg| arg }
+     assert_equal(["a","b",nil], rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence11()
+     abc = "abc"
+     rule = any() - any() - none() >> proc{|arg| arg }
+     assert_equal(nil, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence12()
+     abc = "abc"
+     rule = any() - any() - ~token("c") >> proc{|arg| arg }
+     assert_equal(nil, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence13()
+     abc = "aba"
+     rule = any() - any() - ~token("c") - any() >> proc{|arg| arg }
+     assert_equal(["a","b",["a"],"a"], rule.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence14()
+     abc = "aba"
+     rule1 = token("a") - token("b")
+     rule2 = ~rule(rule1) - any() - any() - any() >> proc{|arg| arg }
+     assert_equal(nil, rule2.parse(Tokens.new(abc)))
+   end
+
+   def test_sequence15()
+     abc = "aca"
+     rule1 = token("a") - token("b")
+     rule2 = ~rule(rule1) - any() - any() - any() >> proc{|arg| arg }
+     assert_equal([["a", "c"], "a", "c", "a"], rule2.parse(Tokens.new(abc)))
+   end
+
+   def test_generator1()
+     generator = TDParser::TokenGenerator.new{|x| ["a","b","c"].each{|e| x.yield(e)} }
+     rule = any() - any() - any() - (any()|empty()) >> proc{|arg| arg }
+     assert_equal(["a","b","c",nil], rule.parse(generator))
+   end
+
+   def test_generator2()
+     rule = any() - any() - any() - (any()|empty()) >> proc{|arg| arg }
+     result = rule.parse{|x| ["a","b","c"].each{|e| x.yield(e)} }
+     assert_equal(["a","b","c",nil], result)
+   end
+
+   def test_iteration1()
+     abc = "abcabc"
+     rule = (token("a") - (token("b")|token("B")) - (token("c")|token("C")))*0 >> proc{|arg| arg.join() }
+     assert_equal(abc, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_iteration2()
+     abc = "aBcabc"
+     rule = (token("a") - (token("b")|token("B")) - (token("c")|token("C")))*0 >> proc{|arg| arg.join() }
+     assert_equal(abc, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_iteration3()
+     abc = ""
+     rule = (token("a") - (token("b")|token("B")) - (token("c")|token("C")))*0 >> proc{|arg| arg.join() }
+     assert_equal(abc, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_iteration4()
+     abc = ""
+     rule = (token("a") - (token("b")|token("B")) - (token("c")|token("C")))*1 >> proc{|arg| arg.join() }
+     assert_equal(nil, rule.parse(Tokens.new(abc)))
+   end
+
+   def test_iteration5()
+     abc = "aBCAbc"
+     rule = (token("a") - (token("b")|token("B")) - (token("c")|token("C")))*0 >> proc{|arg| arg.join() }
+     assert_equal("aBC", rule.parse(Tokens.new(abc)))
+     assert_equal("A", rule.peek())
+   end
+
+   def test_iteration6()
+     abc = "aBCaBcd"
+     rule = (token("a") - (token("b")|token("B")) - (token("c")|token("C")))*0 >> proc{|arg| arg.join() }
+     assert_equal("aBCaBc", rule.parse(Tokens.new(abc)))
+     assert_equal("d", rule.peek())
+   end
+
+   def test_iteration7()
+     buff = ["a","b","b","b","c"]
+     rule = token("a") - token("b")*1 - token("c") >> proc{|x| x}
+     assert_equal(["a", [["b"],["b"],["b"]], "c"], rule.parse(buff))
+   end
+
+   def test_iteration8()
+     buff = ["a","b","b","b","c"]
+     rule = token("a") - token("b")*4 - token("c") >> proc{|x| x}
+     assert_equal(nil, rule.parse(buff))
+   end
+
+   def test_iteration9()
+     buff = ["a","b","c"]
+     rule = token("a") - token("b")*(2..4) - token("c") >> proc{|x| x}
+     assert_equal(nil, rule.parse(buff))
+   end
+
+   def test_iteration10()
+     buff = ["a","b","b","b","c"]
+     rule = token("a") - token("b")*(2..4) - token("c") >> proc{|x| x}
+     assert_equal(["a", [["b"], ["b"], ["b"]], "c"], rule.parse(buff))
+   end
+
+   def test_iteration11()
+     buff = ["a","b","a","b","c"]
+     rule = (token("a") - token("b"))*1 - token("c") >> proc{|x| x}
+     assert_equal([[["a", "b"],["a","b"]], "c"], rule.parse(buff))
+   end
+
+   def test_iteration12()
+     buff = ["c"]
+     rule = token("c") - (token("a") - token("b"))*1 >> proc{|x| x}
+     assert_equal(nil, rule.parse(buff))
+   end
+
+   def test_regex_match()
+     rule = token(/\d+/, :=~){|x| /(\d+)/ =~ x; $1.to_i } >> proc{|x| x[0] }
+     assert_equal(10, rule.parse(["10"]))
+   end
+
+   def test_reference1()
+     buff = ["a","b","c"]
+     rule = token("a")/:a - token("b")/:b - token("c")/:c >> proc{|x| [x[:a],x[:b],x[:c]]}
+     assert_equal([["a"],["b"],["c"]], rule.parse(buff))
+   end
+
+   def test_reference2()
+     buff = ["a","b","c"]
+     rule = (token("a") - token("b"))/:n - token("c") >> proc{|x| x[:n]}
+     assert_equal(["a","b"], rule.parse(buff))
+   end
+
+   def test_reference3()
+     buff = ["a","b","c"]
+     stack = []
+     rule = (token("a") - token("b"))%stack - token("c")%stack >> proc{|x| x}
+     assert_equal(["a","b","c"], rule.parse(buff))
+     assert_equal([["a","b"],["c"]], stack)
+   end
+
+   def test_backref1()
+     buff = ["a","b","a"]
+     rule = token(/\w/)/:x - token("b") - backref(:x) >> proc{|x| x}
+     assert_equal(["a","b","a"], rule.parse(buff))
+   end
+
+   def test_backref2()
+     buff = ["a","b","c"]
+     rule = token(/\w/)/:x - token("b") - backref(:x) >> proc{|x| x}
+     assert_equal(nil, rule.parse(buff))
+   end
+
+   def test_backref3()
+     buff = ["a","b","a","b","a","b"]
+     rule = (token(/\w/) - token(/\w/))/:x - backref(:x)*0 >> proc{|x| x}
+     assert_equal(["a","b",[["a","b"],["a","b"]]], rule.parse(buff))
+   end
+
+   def test_stackref1()
+     buff = ["a","b","a"]
+     stack = []
+     rule = token(/\w/)%stack - token("b") - stackref(stack) >> proc{|x| x}
+     assert_equal(["a","b","a"], rule.parse(buff))
+   end
+
+   def test_stackref2()
+     buff = ["a","b","c"]
+     stack = []
+     rule = token(/\w/)%stack - token("b") - stackref(stack) >> proc{|x| x}
+     assert_equal(nil, rule.parse(buff))
+   end
+
+   def test_stackref3()
+     buff = ["a","b","a","b","a","b"]
+     stack = []
+     rule = (token(/\w/) - token(/\w/))%stack - (stackref(stack)%stack)*0 >> proc{|x| x}
+     assert_equal(["a","b",[["a","b"],["a","b"]]], rule.parse(buff))
+
+     buff = ["a","b","a","b","a","b"]
+     stack = []
+     rule = (token(/\w/) - token(/\w/))%stack - stackref(stack)*0 >> proc{|x| x}
+     assert_equal(["a","b",[["a","b"]]], rule.parse(buff))
+   end
+
+   def test_chainl1()
+     buff = ["3", "-", "2", "-", "1"]
+     rule = chainl(token(/\d+/) >> Proc.new{|x|x[0].to_i}, token("-")){|x|
+       x[0] - x[2]
+     }
+     assert_equal(0, rule.parse(buff))
+   end
+
+   def test_chainl2()
+     buff = ["3", "-", "2", "*", "2", "-", "1"]
+     rule = chainl(token(/\d+/) >> Proc.new{|x|x[0].to_i}, token("*"), token("-")){|x|
+       case x[1]
+       when "-"
+         x[0] - x[2]
+       when "*"
+         x[0] * x[2]
+       end
+     }
+     assert_equal(-2, rule.parse(buff))
+   end
+
+   def test_chainr1()
+     buff = ["3", "-", "2", "-", "1"]
+     rule = chainr(token(/\d+/) >> Proc.new{|x|x[0].to_i}, token("-")){|x|
+       x[0].to_i - x[2].to_i
+     }
+     assert_equal(2, rule.parse(buff))
+   end
+
+   def test_chainr2()
+     buff = ["3", "-", "2", "*", "2", "-", "1"]
+     rule = chainr(token(/\d+/) >> Proc.new{|x|x[0].to_i}, token("*"), token("-")){|x|
+       case x[1]
+       when "-"
+         x[0] - x[2]
+       when "*"
+         x[0] * x[2]
+       end
+     }
+     assert_equal(0, rule.parse(buff))
+   end
+
+   def test_rule1()
+     expr = "1 + 2"
+     assert_equal(3, @calc.parse(expr))
+   end
+
+   def test_rule2()
+     expr = "1 - (2 + 3)"
+     assert_equal(-4, @calc.parse(expr))
+   end
+
+   def test_rule3()
+     expr = "1 - 2 + 3"
+     assert_equal(2, @calc.parse(expr))
+   end
+
+   def test_define()
+     assert_equal(1+10, Calculator2.parse("1+10"))
+     assert_equal(2-1*20+18, Calculator2.parse("2 - 1 * 20 + 18"))
+     assert_equal(2-(1-20), Calculator2.parse("2 - (1 - 20)"))
+     assert_equal(1+2-3, Calculator2.parse("1 + 2 - 3"))
+   end
+
+   def test_tokenizer()
+     tokenizer = StringTokenizer.new({
+       /\d+(?!\.\d)/ => :int,
+       /\d+\.\d+/ => :real,
+       /\+|\-|\*|\// => :op,
+     })
+     tokens = tokenizer.generate("1 + 1.0 - 2").to_a()
+     kinds = tokens.collect{|x| x.kind }
+     vals = tokens.collect{|x| x.value }
+     assert_equal([:int,:op,:real,:op,:int], kinds)
+     assert_equal(["1","+","1.0","-","2"], vals)
+   end
+ end
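
The test file above doubles as the gem's de facto usage documentation: rules are built with token, sequenced with -, alternated with |, repeated with *n or *(range), and given semantic actions with >>, while TDPUtils::StringTokenizer turns a string into the token stream the rules consume. The sketch below condenses that pattern outside the test harness; the Summer grammar is illustrative only and is not part of this release, but it uses only calls the tests above exercise.

    require 'tdp'
    require 'tdputils'

    # Hypothetical grammar: sums a "+"-separated list of integers.
    # Mirrors the structure of Calculator2 in the test file.
    Summer = TDParser.define{|g|
      g.expr =
        g.token(:int) - (g.token("+") - g.token(:int))*0 >> proc{|x|
          # x[0] is the first :int token; x[1] is an array of ["+", :int] pairs.
          x[1].inject(x[0].value.to_i){|n,y| n + y[1].value.to_i }
        }

      def parse(str)
        tokenizer = TDPUtils::StringTokenizer.new({
          /\d+/ => :int,
        })
        expr.parse(tokenizer.generate(str))
      end
    }

    Summer.parse("1 + 2 + 3")  #=> 6, assuming the behavior the tests above demonstrate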
metadata ADDED
@@ -0,0 +1,58 @@
+ --- !ruby/object:Gem::Specification
+ rubygems_version: 0.9.0
+ specification_version: 1
+ name: tdp4r
+ version: !ruby/object:Gem::Version
+   version: 1.3.3
+ date: 2006-07-19 00:00:00 +09:00
+ summary: TDP4R is a top-down parser library that consists of parser combinators and utility functions.
+ require_paths:
+ - lib
+ email: ttate@ttsky.net
+ homepage: http://rubyforge.org/projects/tdp4r/
+ rubyforge_project: tdp4r
+ description:
+ autorequire: tdp
+ default_executable:
+ bindir: bin
+ has_rdoc: false
+ required_ruby_version: !ruby/object:Gem::Version::Requirement
+   requirements:
+   - - ">"
+     - !ruby/object:Gem::Version
+       version: 0.0.0
+   version:
+ platform: ruby
+ signing_key:
+ cert_chain:
+ post_install_message:
+ authors:
+ - Takaaki Tateishi
+ files:
+ - lib/tdp.rb
+ - lib/tdputils.rb
+ - samples/sample1.rb
+ - samples/sample2.rb
+ - samples/sample3.rb
+ - samples/sample4.rb
+ - samples/sample5.rb
+ - samples/sample6.rb
+ - samples/sample7.rb
+ - samples/sample_list.rb
+ - test/test_tdp.rb
+ - doc/faq.txt
+ - doc/guide.txt
+ test_files:
+ - test/test_tdp.rb
+ rdoc_options: []
+
+ extra_rdoc_files: []
+
+ executables: []
+
+ extensions: []
+
+ requirements: []
+
+ dependencies: []
+
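
A note on loading: the autorequire: tdp field dates this release to the RubyGems 0.9 era, when activating a gem with require_gem would load the autorequire file for you; tdputils has to be required by hand, as the test file does. A minimal sketch, assuming a RubyGems of that vintage:

    require 'rubygems'
    require_gem 'tdp4r'   # the autorequire field pulls in lib/tdp.rb
    require 'tdputils'    # the utility half (StringTokenizer) is loaded explicitly

On a modern RubyGems, plain require 'tdp' and require 'tdputils' after installing the gem should achieve the same thing, since both files sit on the lib require path.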