rley 0.8.14 → 0.8.15

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
Files changed (76)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +20 -2
  3. data/CHANGELOG.md +3 -0
  4. data/examples/general/calc_iter1/spec/calculator_spec.rb +9 -9
  5. data/examples/general/calc_iter2/spec/calculator_spec.rb +39 -39
  6. data/examples/general/recursive_right.rb +2 -2
  7. data/lib/rley/constants.rb +1 -1
  8. data/lib/rley/gfg/grm_flow_graph.rb +0 -1
  9. data/lib/rley/parser/parse_entry_set.rb +0 -1
  10. data/lib/rley/parser/parse_walker_factory.rb +0 -1
  11. data/lib/rley/rgn/grammar_builder.rb +0 -2
  12. data/lib/rley/rgn/tokenizer.rb +1 -1
  13. data/lib/rley/syntax/base_grammar_builder.rb +0 -1
  14. data/lib/rley/syntax/grammar.rb +0 -1
  15. data/spec/rley/base/dotted_item_spec.rb +46 -46
  16. data/spec/rley/base/grm_items_builder_spec.rb +1 -1
  17. data/spec/rley/engine_spec.rb +50 -50
  18. data/spec/rley/formatter/asciitree_spec.rb +8 -8
  19. data/spec/rley/formatter/bracket_notation_spec.rb +10 -10
  20. data/spec/rley/formatter/debug_spec.rb +10 -10
  21. data/spec/rley/formatter/json_spec.rb +6 -7
  22. data/spec/rley/gfg/call_edge_spec.rb +6 -6
  23. data/spec/rley/gfg/edge_spec.rb +8 -7
  24. data/spec/rley/gfg/end_vertex_spec.rb +8 -7
  25. data/spec/rley/gfg/epsilon_edge_spec.rb +5 -4
  26. data/spec/rley/gfg/grm_flow_graph_spec.rb +33 -34
  27. data/spec/rley/gfg/item_vertex_spec.rb +34 -36
  28. data/spec/rley/gfg/non_terminal_vertex_spec.rb +12 -12
  29. data/spec/rley/gfg/return_edge_spec.rb +6 -6
  30. data/spec/rley/gfg/scan_edge_spec.rb +7 -6
  31. data/spec/rley/gfg/shortcut_edge_spec.rb +15 -15
  32. data/spec/rley/gfg/start_vertex_spec.rb +8 -8
  33. data/spec/rley/gfg/vertex_spec.rb +18 -18
  34. data/spec/rley/lexical/literal_spec.rb +5 -5
  35. data/spec/rley/lexical/token_range_spec.rb +55 -55
  36. data/spec/rley/lexical/token_spec.rb +17 -16
  37. data/spec/rley/parse_forest_visitor_spec.rb +30 -32
  38. data/spec/rley/parse_rep/ambiguous_parse_spec.rb +2 -2
  39. data/spec/rley/parse_rep/ast_builder_spec.rb +30 -30
  40. data/spec/rley/parse_rep/cst_builder_spec.rb +85 -85
  41. data/spec/rley/parse_rep/groucho_spec.rb +23 -23
  42. data/spec/rley/parse_rep/parse_forest_builder_spec.rb +42 -42
  43. data/spec/rley/parse_rep/parse_forest_factory_spec.rb +10 -12
  44. data/spec/rley/parse_rep/parse_tree_factory_spec.rb +10 -15
  45. data/spec/rley/parse_tree_visitor_spec.rb +43 -46
  46. data/spec/rley/parser/dangling_else_spec.rb +12 -12
  47. data/spec/rley/parser/error_reason_spec.rb +37 -37
  48. data/spec/rley/parser/gfg_chart_spec.rb +27 -29
  49. data/spec/rley/parser/gfg_earley_parser_spec.rb +55 -56
  50. data/spec/rley/parser/gfg_parsing_spec.rb +106 -103
  51. data/spec/rley/parser/parse_entry_set_spec.rb +63 -61
  52. data/spec/rley/parser/parse_entry_spec.rb +73 -71
  53. data/spec/rley/parser/parse_walker_factory_spec.rb +14 -15
  54. data/spec/rley/ptree/non_terminal_node_spec.rb +16 -16
  55. data/spec/rley/ptree/parse_tree_node_spec.rb +11 -11
  56. data/spec/rley/ptree/parse_tree_spec.rb +6 -8
  57. data/spec/rley/ptree/terminal_node_spec.rb +6 -6
  58. data/spec/rley/rgn/grammar_builder_spec.rb +69 -67
  59. data/spec/rley/rgn/parser_spec.rb +63 -63
  60. data/spec/rley/rgn/repetition_node_spec.rb +15 -15
  61. data/spec/rley/rgn/sequence_node_spec.rb +10 -10
  62. data/spec/rley/rgn/symbol_node_spec.rb +5 -6
  63. data/spec/rley/rgn/tokenizer_spec.rb +68 -67
  64. data/spec/rley/sppf/alternative_node_spec.rb +16 -16
  65. data/spec/rley/sppf/non_terminal_node_spec.rb +20 -20
  66. data/spec/rley/sppf/token_node_spec.rb +13 -13
  67. data/spec/rley/syntax/base_grammar_builder_spec.rb +76 -86
  68. data/spec/rley/syntax/grammar_spec.rb +40 -78
  69. data/spec/rley/syntax/grm_symbol_spec.rb +7 -7
  70. data/spec/rley/syntax/match_closest_spec.rb +8 -8
  71. data/spec/rley/syntax/non_terminal_spec.rb +25 -25
  72. data/spec/rley/syntax/production_spec.rb +33 -33
  73. data/spec/rley/syntax/symbol_seq_spec.rb +27 -27
  74. data/spec/rley/syntax/terminal_spec.rb +12 -11
  75. data/spec/support/base_tokenizer_spec.rb +9 -8
  76. metadata +2 -2
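
The spec diffs below all follow one pattern: anonymous subjects are replaced by named subjects built from described_class, example descriptions drop the "should ..." wording in favour of present-tense phrasing, and boolean expectations use the be matcher instead of eq. The snippet below is only an illustrative sketch of that style; EmptyInputError is a made-up stand-in class, not code from the rley gem.

require 'rspec/autorun'

# Hypothetical stand-in class, defined here only so the spec can run.
class EmptyInputError
  def rank
    0
  end

  def empty_input?
    true
  end
end

RSpec.describe EmptyInputError do
  # Named subject built from described_class, declared once for the group
  subject(:an_error) { described_class.new }

  it 'knows the error token rank' do
    expect(an_error.rank).to eq(0)
  end

  it 'reports an empty input' do
    # be(true) checks for the exact object true, mirroring the eq -> be change
    expect(an_error.empty_input?).to be(true)
  end
end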
data/spec/rley/parser/error_reason_spec.rb
@@ -8,60 +8,63 @@ require_relative '../../../lib/rley/parser/error_reason'
  module Rley # Open this namespace to avoid module qualifier prefixes
  module Parser # Open this namespace to avoid module qualifier prefixes
  describe NoInput do
- context 'Initialization:' do
- # Default instantiation rule
- subject { NoInput.new }
+ # Default instantiation rule
+ subject(:an_exception) { described_class.new }

- it 'should be created without argument' do
- expect { NoInput.new }.not_to raise_error
+ context 'Initialization:' do
+ it 'is created without argument' do
+ expect { described_class.new }.not_to raise_error
  end

- it 'should know the error token rank' do
- expect(subject.rank).to eq(0)
+ it 'knows the error token rank' do
+ expect(an_exception.rank).to eq(0)
  end
  end # context

  context 'Provided services:' do
- it 'should emit a standard message' do
+ it 'emits a standard message' do
  text = 'Input cannot be empty.'
- expect(subject.to_s).to eq(text)
- expect(subject.message).to eq(text)
+ expect(an_exception.to_s).to eq(text)
+ expect(an_exception.message).to eq(text)
  end

- it 'should give a clear inspection text' do
+ it 'gives a clear inspection text' do
  text = 'Rley::Parser::NoInput: Input cannot be empty.'
- expect(subject.inspect).to eq(text)
+ expect(an_exception.inspect).to eq(text)
  end
  end # context
  end # describe

  describe ExpectationNotMet do
+ # Default instantiation rule
+ subject(:an_exception) { described_class.new(3, err_token, terminals) }
+
  let(:err_token) { double('fake-token') }
  let(:terminals) do
  %w[PLUS LPAREN].map { |name| Syntax::Terminal.new(name) }
  end

- # Default instantiation rule
- subject { ExpectationNotMet.new(3, err_token, terminals) }
-
  context 'Initialization:' do
- it 'should be created with arguments' do
+ it 'is created with arguments' do
  expect do
- ExpectationNotMet.new(3, err_token, terminals)
+ described_class.new(3, err_token, terminals)
  end.not_to raise_error
  end

- it 'should know the error position' do
- expect(subject.rank).to eq(3)
+ it 'knows the error position' do
+ expect(an_exception.rank).to eq(3)
  end

- it 'should know the expected terminals' do
- expect(subject.expected_terminals).to eq(terminals)
+ it 'knows the expected terminals' do
+ expect(an_exception.expected_terminals).to eq(terminals)
  end
  end # context
  end # describe

  describe UnexpectedToken do
+ # Default instantiation rule
+ subject(:an_exception) { described_class.new(3, err_token, terminals) }
+
  let(:err_lexeme) { '-' }
  let(:err_terminal) { Syntax::Terminal.new('MINUS') }
  let(:pos) { Lexical::Position.new(3, 4) }
@@ -70,30 +73,30 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  %w[PLUS LPAREN].map { |name| Syntax::Terminal.new(name) }
  end

- # Default instantiation rule
- subject { UnexpectedToken.new(3, err_token, terminals) }
-
  context 'Initialization:' do
- it 'should be created with arguments' do
+ it 'is created with arguments' do
  expect do
- UnexpectedToken.new(3, err_token, terminals)
+ described_class.new(3, err_token, terminals)
  end.not_to raise_error
  end
  end # context

  context 'Provided services:' do
- it 'should emit a message' do
+ it 'emits a message' do
  text = <<MESSAGE_END
  Syntax error at or near token line 3, column 4 >>>-<<<
  Expected one of: ['PLUS', 'LPAREN'], found a 'MINUS' instead.
  MESSAGE_END
- expect(subject.to_s).to eq(text.chomp)
- expect(subject.message).to eq(text.chomp)
+ expect(an_exception.to_s).to eq(text.chomp)
+ expect(an_exception.message).to eq(text.chomp)
  end
  end # context
  end # describe

  describe PrematureInputEnd do
+ # Default instantiation rule
+ subject(:an_exception) { described_class.new(3, err_token, terminals) }
+
  let(:err_lexeme) { '+' }
  let(:err_terminal) { Syntax::Terminal.new('PLUS') }
  let(:pos) { Lexical::Position.new(3, 4) }
@@ -102,25 +105,22 @@ MESSAGE_END
  %w[INT LPAREN].map { |name| Syntax::Terminal.new(name) }
  end

- # Default instantiation rule
- subject { PrematureInputEnd.new(3, err_token, terminals) }
-
  context 'Initialization:' do
- it 'should be created with arguments' do
+ it 'is created with arguments' do
  expect do
- PrematureInputEnd.new(3, err_token, terminals)
+ described_class.new(3, err_token, terminals)
  end.not_to raise_error
  end
  end # context

  context 'Provided services:' do
- it 'should emit a message' do
+ it 'emits a message' do
  text = <<MESSAGE_END
  Premature end of input after '+' at position line 3, column 4
  Expected one of: ['INT', 'LPAREN'].
  MESSAGE_END
- expect(subject.to_s).to eq(text.chomp)
- expect(subject.message).to eq(text.chomp)
+ expect(an_exception.to_s).to eq(text.chomp)
+ expect(an_exception.message).to eq(text.chomp)
  end
  end # context
  end # describe
data/spec/rley/parser/gfg_chart_spec.rb
@@ -28,6 +28,9 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  return helper.build_dotted_items(aGrammar)
  end

+ # Default instantiation rule
+ subject(:a_chart) { described_class.new(sample_gfg) }
+
  let(:count_token) { 20 }
  let(:output) { StringIO.new('', 'w') }

@@ -50,58 +53,53 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  let(:sample_start_symbol) { sample_gfg.start_vertex.non_terminal }
  let(:second_vertex) { sample_gfg.start_vertex.edges[0].successor }

-
- # Default instantiation rule
- subject { GFGChart.new(sample_gfg) }
-
-
  context 'Initialization:' do
- it 'should be created with start vertex, token count' do
- expect { GFGChart.new(sample_gfg) }.not_to raise_error
+ it 'is created with start vertex, token count' do
+ expect { described_class.new(sample_gfg) }.not_to raise_error
  end

- it 'should have one entry set' do
- expect(subject.sets.size).to eq(1)
+ it 'has one entry set' do
+ expect(a_chart.sets.size).to eq(1)
  end

- it 'should know the start symbol' do
- expect(subject.start_symbol).to eq(sample_start_symbol)
+ it 'knows the start symbol' do
+ expect(a_chart.start_symbol).to eq(sample_start_symbol)
  end

- it 'should know the initial parse entry' do
- expect(subject.initial_entry.vertex).to eq(sample_gfg.start_vertex)
- expect(subject.initial_entry.origin).to eq(0)
+ it 'knows the initial parse entry' do
+ expect(a_chart.initial_entry.vertex).to eq(sample_gfg.start_vertex)
+ expect(a_chart.initial_entry.origin).to eq(0)
  end
  end # context

  context 'Provided services:' do
- it 'should accept the pushing of a parse entry in existing set' do
- expect(subject.sets[0].entries.size).to eq(1)
- subject.push_entry(second_vertex, 0, 0, :scan_rule)
- expect(subject.sets[0].entries.size).to eq(2)
+ it 'accepts the pushing of a parse entry in existing set' do
+ expect(a_chart.sets[0].entries.size).to eq(1)
+ a_chart.push_entry(second_vertex, 0, 0, :scan_rule)
+ expect(a_chart.sets[0].entries.size).to eq(2)
  end

- it 'should accept the pushing of a parse entry in new set' do
- expect(subject.sets[0].entries.size).to eq(1)
- subject.push_entry(second_vertex, 0, 1, :scan_rule)
- expect(subject.sets[0].entries.size).to eq(1)
- expect(subject.sets.size).to eq(2)
- expect(subject.sets[1].entries.size).to eq(1)
+ it 'accepts the pushing of a parse entry in new set' do
+ expect(a_chart.sets[0].entries.size).to eq(1)
+ a_chart.push_entry(second_vertex, 0, 1, :scan_rule)
+ expect(a_chart.sets[0].entries.size).to eq(1)
+ expect(a_chart.sets.size).to eq(2)
+ expect(a_chart.sets[1].entries.size).to eq(1)
  end

- it 'should retrieve an existing set at given position' do
- expect(subject[0]).to eq(subject.sets[0])
+ it 'retrieves an existing set at given position' do
+ expect(a_chart[0]).to eq(a_chart.sets[0])
  end

- it 'should a user-friendly text representation of itself' do
- subject.push_entry(second_vertex, 0, 1, :scan_rule)
+ it 'returns a user-friendly text representation of itself' do
+ a_chart.push_entry(second_vertex, 0, 1, :scan_rule)
  representation = <<REPR
  State[0]
  .S | 0
  State[1]
  S => . A | 0
  REPR
- expect(subject.to_s).to eq(representation)
+ expect(a_chart.to_s).to eq(representation)
  end
  end # context
  end # describe
data/spec/rley/parser/gfg_earley_parser_spec.rb
@@ -28,6 +28,9 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  include GrammarHelper # Mix-in with method for creating token sequence
  include ExpectationHelper # Mix-in with expectation on parse entry sets

+ # Default instantiation rule
+ subject(:a_parser) { described_class.new(grammar_abc) }
+
  # Factory method. Build a production with the given sequence
  # of symbols as its rhs.
  let(:grammar_abc) do
@@ -39,7 +42,6 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  build_token_sequence(%w[a a b c c], grammar_abc)
  end

-
  # Grammar 2: A simple arithmetic expression language
  # (based on example in article on Earley's algorithm in Wikipedia)
  # P ::= S.
@@ -80,33 +82,30 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  build_token_sequence(input_sequence, grammar_expr)
  end

- # Default instantiation rule
- subject { GFGEarleyParser.new(grammar_abc) }
-
  context 'Initialization:' do
- it 'should be created with a grammar' do
- expect { GFGEarleyParser.new(grammar_abc) }.not_to raise_error
+ it 'is created with a grammar' do
+ expect { described_class.new(grammar_abc) }.not_to raise_error
  end

- it 'should know its grammar' do
- expect(subject.grammar).to eq(grammar_abc)
+ it 'knows its grammar' do
+ expect(a_parser.grammar).to eq(grammar_abc)
  end

- it 'should know its dotted items' do
- expect(subject.dotted_items.size).to eq(8)
+ it 'knows its dotted items' do
+ expect(a_parser.dotted_items.size).to eq(8)
  end

- it 'should know its flow graph' do
- expect(subject.gf_graph).to be_kind_of(GFG::GrmFlowGraph)
+ it 'knows its flow graph' do
+ expect(a_parser.gf_graph).to be_a(GFG::GrmFlowGraph)
  end
  end # context

- context 'Parsing: ' do
+ context 'Parsing:' do
  # rubocop: disable Naming/VariableNumber
- it 'should parse a valid simple input' do
- parse_result = subject.parse(build_token_sequence(%w[a a b c c], grammar_abc))
- expect(parse_result.success?).to eq(true)
- expect(parse_result.ambiguous?).to eq(false)
+ it 'parses a valid simple input' do
+ parse_result = a_parser.parse(build_token_sequence(%w[a a b c c], grammar_abc))
+ expect(parse_result.success?).to be(true)
+ expect(parse_result.ambiguous?).to be(false)
  ######################
  # Expectation chart[0]:
  expected = [
@@ -184,11 +183,11 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  end
  # rubocop: enable Naming/VariableNumber

- it 'should parse a valid simple expression' do
- instance = GFGEarleyParser.new(grammar_expr)
+ it 'parses a valid simple expression' do
+ instance = described_class.new(grammar_expr)
  parse_result = instance.parse(grm2_tokens)
- expect(parse_result.success?).to eq(true)
- # expect(parse_result.ambiguous?).to eq(false)
+ expect(parse_result.success?).to be(true)
+ # expect(parse_result.ambiguous?).to be(false)

  ###################### S(0): . 2 + 3 * 4
  # Expectation chart[0]:
@@ -279,14 +278,14 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  compare_entry_texts(parse_result.chart[5], expected)
  end

- it 'should support Kleene plus ' do
+ it 'supports Kleene plus' do
  extend(GrammarIntSeqHelper)
  grammar = grammar_int_seq_builder.grammar
- instance = GFGEarleyParser.new(grammar)
+ instance = described_class.new(grammar)
  tokens = int_seq_tokenizer('6, 36, 216')
  parse_result = nil
  expect { parse_result = instance.parse(tokens) }.not_to raise_error
- expect(parse_result.success?).to eq(true)
+ expect(parse_result.success?).to be(true)

  ###################### S(0): . 6, 36, 216
  # Expectation chart[0]:
@@ -364,7 +363,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  compare_entry_texts(parse_result.chart[4], expected)
  end

- it 'should parse a nullable grammar' do
+ it 'parses a nullable grammar' do
  # Simple but problematic grammar for the original Earley parser
  # (based on example in D. Grune, C. Jacobs "Parsing Techniques" book)
  # Ss => A A 'x';
@@ -379,10 +378,10 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  pos = Lexical::Position.new(1, 1)
  tokens = [Lexical::Token.new('x', t_x, pos)]

- instance = GFGEarleyParser.new(builder.grammar)
+ instance = described_class.new(builder.grammar)
  expect { instance.parse(tokens) }.not_to raise_error
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(true)
+ expect(parse_result.success?).to be(true)
  ###################### S(0): . x
  # Expectation chart[0]:
  expected = [
@@ -405,7 +404,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  compare_entry_texts(parse_result.chart[1], expected)
  end

- it 'should parse an ambiguous grammar (I)' do
+ it 'parses an ambiguous grammar (I)' do
  # Grammar 3: A ambiguous arithmetic expression language
  # (based on example in article on Earley's algorithm in Wikipedia)
  # P => S.
@@ -433,11 +432,11 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  { '4' => 'integer' }
  ]
  tokens = build_token_sequence(input_sequence, builder.grammar)
- instance = GFGEarleyParser.new(builder.grammar)
+ instance = described_class.new(builder.grammar)
  expect { instance.parse(tokens) }.not_to raise_error
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(true)
- # expect(parse_result.ambiguous?).to eq(true)
+ expect(parse_result.success?).to be(true)
+ # expect(parse_result.ambiguous?).to be(true)

  ###################### S(0): . 2 + 3 * 4
  # Expectation chart[0]:
@@ -557,15 +556,15 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  check_antecedence(parse_result, 5, expected_antecedents)
  end

- it 'should parse an ambiguous grammar (II)' do
+ it 'parses an ambiguous grammar (II)' do
  extend(AmbiguousGrammarHelper)
  grammar = grammar_builder.grammar
- instance = GFGEarleyParser.new(grammar)
+ instance = described_class.new(grammar)
  tokens = tokenize('abc + def + ghi', grammar)
  expect { instance.parse(tokens) }.not_to raise_error
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(true)
- # expect(parse_result.ambiguous?).to eq(true)
+ expect(parse_result.success?).to be(true)
+ # expect(parse_result.ambiguous?).to be(true)

  ###################### S(0): . abc + def + ghi
  # Expectation chart[0]:
@@ -642,11 +641,11 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  compare_entry_texts(parse_result.chart[5], expected)
  end

- it 'should parse an invalid simple input' do
+ it 'parses an invalid simple input' do
  # Parse an erroneous input (b is missing)
  wrong = build_token_sequence(%w[a a c c], grammar_abc)
- parse_result = subject.parse(wrong)
- expect(parse_result.success?).to eq(false)
+ parse_result = a_parser.parse(wrong)
+ expect(parse_result.success?).to be(false)
  err_msg = <<-MSG
  Syntax error at or near token line 1, column 5 >>>c<<<
  Expected one of: ['a', 'b'], found a 'c' instead.
@@ -654,24 +653,24 @@ MSG
  expect(parse_result.failure_reason.message).to eq(err_msg.chomp)
  end

- it 'should report error when no input provided but was required' do
+ it 'reports error when no input provided but was required' do
  helper = GrammarPBHelper.new
  grammar = helper.grammar
- instance = GFGEarleyParser.new(grammar)
+ instance = described_class.new(grammar)
  tokens = helper.tokenize('')
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(false)
+ expect(parse_result.success?).to be(false)
  err_msg = 'Input cannot be empty.'
  expect(parse_result.failure_reason.message).to eq(err_msg)
  end

- it 'should report error when input ends prematurely' do
+ it 'reports error when input ends prematurely' do
  helper = GrammarPBHelper.new
  grammar = helper.grammar
- instance = GFGEarleyParser.new(grammar)
+ instance = described_class.new(grammar)
  tokens = helper.tokenize('1 +')
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(false)
+ expect(parse_result.success?).to be(false)
  ###################### S(0) == . 1 +
  # Expectation chart[0]:
  expected = [
@@ -713,16 +712,16 @@ MSG
  end


- it 'should parse a common sample' do
+ it 'parses a common sample' do
  # Use grammar based on example found in paper of
  # K. Pingali and G. Bilardi:
  # "A Graphical Model for Context-Free Grammar Parsing"
  helper = GrammarPBHelper.new
  grammar = helper.grammar
- instance = GFGEarleyParser.new(grammar)
+ instance = described_class.new(grammar)
  tokens = helper.tokenize('7 + 8 + 9')
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(true)
+ expect(parse_result.success?).to be(true)
  ###################### S(0) == . 7 + 8 + 9
  # Expectation chart[0]:
  expected = [
@@ -801,7 +800,7 @@ MSG
  compare_entry_texts(parse_result.chart[5], expected)
  end

- it 'should parse a grammar with nullable nonterminals' do
+ it 'parses a grammar with nullable nonterminals' do
  # Grammar 4: A grammar with nullable nonterminal
  # based on example from "Parsing Techniques" book
  # (D. Grune, C. Jabobs)
@@ -828,10 +827,10 @@ MSG
  end

  tokens = build_token_sequence(%w[a a / a], builder.grammar)
- instance = GFGEarleyParser.new(builder.grammar)
+ instance = described_class.new(builder.grammar)
  expect { instance.parse(tokens) }.not_to raise_error
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(true)
+ expect(parse_result.success?).to be(true)

  ###################### S(0) == . a a / a
  # Expectation chart[0]:
@@ -923,7 +922,7 @@ MSG
  compare_entry_texts(parse_result.chart[4], expected)
  end

- it 'should parse a right recursive grammar' do
+ it 'parses a right recursive grammar' do
  # Simple right recursive grammar
  # based on example in D. Grune, C. Jacobs "Parsing Techniques" book
  # pp. 224 et sq.
@@ -939,9 +938,9 @@ MSG
  grammar = builder.grammar
  tokens = build_token_sequence(%w[a a a a], grammar)

- instance = GFGEarleyParser.new(grammar)
+ instance = described_class.new(grammar)
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(true)
+ expect(parse_result.success?).to be(true)
  ###################### S(0): . a a a a
  # Expectation chart[0]:
  expected = [
@@ -1016,7 +1015,7 @@ MSG
  compare_entry_texts(parse_result.chart[4], expected)
  end

- it 'should support modifier(s) in start rule' do
+ it 'supports modifier(s) in start rule' do
  # An implicit EOF marker is a special terminal
  # that denotes the end of input string but doesn't
  # appear explicitly as some character or text in the input.
@@ -1030,9 +1029,9 @@ MSG
  grammar = builder.grammar
  tokens = build_token_sequence(%w[EOF], grammar)
  tokens[0].instance_variable_set(:@lexeme, '')
- instance = GFGEarleyParser.new(grammar)
+ instance = described_class.new(grammar)
  parse_result = instance.parse(tokens)
- expect(parse_result.success?).to eq(true)
+ expect(parse_result.success?).to be(true)
  end
  end # context
  end # describe