rley 0.8.14 → 0.8.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +20 -2
  3. data/CHANGELOG.md +3 -0
  4. data/examples/general/calc_iter1/spec/calculator_spec.rb +9 -9
  5. data/examples/general/calc_iter2/spec/calculator_spec.rb +39 -39
  6. data/examples/general/recursive_right.rb +2 -2
  7. data/lib/rley/constants.rb +1 -1
  8. data/lib/rley/gfg/grm_flow_graph.rb +0 -1
  9. data/lib/rley/parser/parse_entry_set.rb +0 -1
  10. data/lib/rley/parser/parse_walker_factory.rb +0 -1
  11. data/lib/rley/rgn/grammar_builder.rb +0 -2
  12. data/lib/rley/rgn/tokenizer.rb +1 -1
  13. data/lib/rley/syntax/base_grammar_builder.rb +0 -1
  14. data/lib/rley/syntax/grammar.rb +0 -1
  15. data/spec/rley/base/dotted_item_spec.rb +46 -46
  16. data/spec/rley/base/grm_items_builder_spec.rb +1 -1
  17. data/spec/rley/engine_spec.rb +50 -50
  18. data/spec/rley/formatter/asciitree_spec.rb +8 -8
  19. data/spec/rley/formatter/bracket_notation_spec.rb +10 -10
  20. data/spec/rley/formatter/debug_spec.rb +10 -10
  21. data/spec/rley/formatter/json_spec.rb +6 -7
  22. data/spec/rley/gfg/call_edge_spec.rb +6 -6
  23. data/spec/rley/gfg/edge_spec.rb +8 -7
  24. data/spec/rley/gfg/end_vertex_spec.rb +8 -7
  25. data/spec/rley/gfg/epsilon_edge_spec.rb +5 -4
  26. data/spec/rley/gfg/grm_flow_graph_spec.rb +33 -34
  27. data/spec/rley/gfg/item_vertex_spec.rb +34 -36
  28. data/spec/rley/gfg/non_terminal_vertex_spec.rb +12 -12
  29. data/spec/rley/gfg/return_edge_spec.rb +6 -6
  30. data/spec/rley/gfg/scan_edge_spec.rb +7 -6
  31. data/spec/rley/gfg/shortcut_edge_spec.rb +15 -15
  32. data/spec/rley/gfg/start_vertex_spec.rb +8 -8
  33. data/spec/rley/gfg/vertex_spec.rb +18 -18
  34. data/spec/rley/lexical/literal_spec.rb +5 -5
  35. data/spec/rley/lexical/token_range_spec.rb +55 -55
  36. data/spec/rley/lexical/token_spec.rb +17 -16
  37. data/spec/rley/parse_forest_visitor_spec.rb +30 -32
  38. data/spec/rley/parse_rep/ambiguous_parse_spec.rb +2 -2
  39. data/spec/rley/parse_rep/ast_builder_spec.rb +30 -30
  40. data/spec/rley/parse_rep/cst_builder_spec.rb +85 -85
  41. data/spec/rley/parse_rep/groucho_spec.rb +23 -23
  42. data/spec/rley/parse_rep/parse_forest_builder_spec.rb +42 -42
  43. data/spec/rley/parse_rep/parse_forest_factory_spec.rb +10 -12
  44. data/spec/rley/parse_rep/parse_tree_factory_spec.rb +10 -15
  45. data/spec/rley/parse_tree_visitor_spec.rb +43 -46
  46. data/spec/rley/parser/dangling_else_spec.rb +12 -12
  47. data/spec/rley/parser/error_reason_spec.rb +37 -37
  48. data/spec/rley/parser/gfg_chart_spec.rb +27 -29
  49. data/spec/rley/parser/gfg_earley_parser_spec.rb +55 -56
  50. data/spec/rley/parser/gfg_parsing_spec.rb +106 -103
  51. data/spec/rley/parser/parse_entry_set_spec.rb +63 -61
  52. data/spec/rley/parser/parse_entry_spec.rb +73 -71
  53. data/spec/rley/parser/parse_walker_factory_spec.rb +14 -15
  54. data/spec/rley/ptree/non_terminal_node_spec.rb +16 -16
  55. data/spec/rley/ptree/parse_tree_node_spec.rb +11 -11
  56. data/spec/rley/ptree/parse_tree_spec.rb +6 -8
  57. data/spec/rley/ptree/terminal_node_spec.rb +6 -6
  58. data/spec/rley/rgn/grammar_builder_spec.rb +69 -67
  59. data/spec/rley/rgn/parser_spec.rb +63 -63
  60. data/spec/rley/rgn/repetition_node_spec.rb +15 -15
  61. data/spec/rley/rgn/sequence_node_spec.rb +10 -10
  62. data/spec/rley/rgn/symbol_node_spec.rb +5 -6
  63. data/spec/rley/rgn/tokenizer_spec.rb +68 -67
  64. data/spec/rley/sppf/alternative_node_spec.rb +16 -16
  65. data/spec/rley/sppf/non_terminal_node_spec.rb +20 -20
  66. data/spec/rley/sppf/token_node_spec.rb +13 -13
  67. data/spec/rley/syntax/base_grammar_builder_spec.rb +76 -86
  68. data/spec/rley/syntax/grammar_spec.rb +40 -78
  69. data/spec/rley/syntax/grm_symbol_spec.rb +7 -7
  70. data/spec/rley/syntax/match_closest_spec.rb +8 -8
  71. data/spec/rley/syntax/non_terminal_spec.rb +25 -25
  72. data/spec/rley/syntax/production_spec.rb +33 -33
  73. data/spec/rley/syntax/symbol_seq_spec.rb +27 -27
  74. data/spec/rley/syntax/terminal_spec.rb +12 -11
  75. data/spec/support/base_tokenizer_spec.rb +9 -8
  76. metadata +2 -2
data/spec/rley/parser/gfg_parsing_spec.rb

@@ -35,6 +35,9 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  return helper.build_dotted_items(aGrammar)
  end

+ # Default instantiation rule
+ subject(:parsing) { described_class.new(sample_gfg) }
+
  # Factory method. Build a production with the given sequence
  # of symbols as its rhs.
  let(:grm1) do
@@ -52,34 +55,31 @@ module Rley # Open this namespace to avoid module qualifier prefixes

  let(:output) { StringIO.new('', 'w') }

- # Default instantiation rule
- subject { GFGParsing.new(sample_gfg) }
-
  context 'Initialization:' do
- it 'should be created with a GFG' do
- expect { GFGParsing.new(sample_gfg) }.not_to raise_error
+ it 'is created with a GFG' do
+ expect { described_class.new(sample_gfg) }.not_to raise_error
  end

- it 'should have an empty tokens array' do
- expect(subject.tokens).to be_empty
+ it 'has an empty tokens array' do
+ expect(parsing.tokens).to be_empty
  end

- it 'should know its chart object' do
- expect(subject.chart).to be_kind_of(GFGChart)
+ it 'knows its chart object' do
+ expect(parsing.chart).to be_a(GFGChart)
  end

- it 'should know the initial parse entry' do
- expect(subject.initial_entry).to eq(subject.chart.initial_entry)
+ it 'knows the initial parse entry' do
+ expect(parsing.initial_entry).to eq(parsing.chart.initial_entry)
  end

- it 'should have no antecedence for the initial parse entry' do
- antecedence = subject.antecedence
+ it 'has no antecedence for the initial parse entry' do
+ antecedence = parsing.antecedence
  expect(antecedence.size).to eq(1)
- expect(antecedence.fetch(subject.initial_entry)).to be_empty
+ expect(antecedence.fetch(parsing.initial_entry)).to be_empty
  end

  =begin
- it 'should emit trace level 1 info' do
+ it 'emits trace level 1 info' do
  tracer = ParseTracer.new(1, output, grm1_tokens)
  Parsing.new([ start_dotted_rule ], grm1_tokens, tracer)
  expectations = <<-SNIPPET
@@ -95,16 +95,16 @@ SNIPPET
  context 'Parsing:' do
  # Utility method to fill the first entry set...
  def fill_first_set
- subject.start_rule(subject.initial_entry, 0)
- subject.call_rule(subject.chart[0].last, 0)
- subject.start_rule(subject.chart[0].last, 0)
+ parsing.start_rule(parsing.initial_entry, 0)
+ parsing.call_rule(parsing.chart[0].last, 0)
+ parsing.start_rule(parsing.chart[0].last, 0)
  end

  # Utility method to initialize the second entry set...
  def seed_second_set
  # Cheating: we change the tokens to scan...
  # Seeding second entry set...
- subject.scan_rule(0, grm1_token_b[0])
+ parsing.scan_rule(0, grm1_token_b[0])
  end

  # Utility method used to invoke the private method 'push_entry'
@@ -112,93 +112,93 @@ SNIPPET
  aParsing.send(:push_entry, *args)
  end

- it 'should push a parse entry to a given chart entry set' do
- expect(subject.chart.sets[1]).to be_nil
+ it 'pushes a parse entry to a given chart entry set' do
+ expect(parsing.chart.sets[1]).to be_nil
  a_vertex = sample_gfg.find_vertex('A => a . A c')

- push_entry(subject, a_vertex, 1, 1, :scan_rule)
- expect(subject.chart[1].size).to eq(1)
- expect(subject.chart[1].first.vertex).to eq(a_vertex)
+ push_entry(parsing, a_vertex, 1, 1, :scan_rule)
+ expect(parsing.chart[1].size).to eq(1)
+ expect(parsing.chart[1].first.vertex).to eq(a_vertex)

  # Pushing twice the same state must be no-op
- push_entry(subject, a_vertex, 1, 1, :scan_rule)
- expect(subject.chart[1].size).to eq(1)
+ push_entry(parsing, a_vertex, 1, 1, :scan_rule)
+ expect(parsing.chart[1].size).to eq(1)

  # Pushing to another entry set
- push_entry(subject, a_vertex, 1, 2, :scan_rule)
- expect(subject.chart[2].size).to eq(1)
+ push_entry(parsing, a_vertex, 1, 2, :scan_rule)
+ expect(parsing.chart[2].size).to eq(1)
  end

- it 'should complain when trying to push nil instead of vertex' do
+ it 'complains when trying to push nil instead of vertex' do
  err = StandardError
  msg = 'Vertex may not be nil'
- expect { push_entry(subject, nil, 1, 1, :start_rule) }
+ expect { push_entry(parsing, nil, 1, 1, :start_rule) }
  .to raise_error(err, msg)
  end

- it 'should use the start rule with initial entry' do
- expect(subject.chart[0].size).to eq(1)
- initial_entry = subject.initial_entry
- subject.start_rule(initial_entry, 0)
+ it 'uses the start rule with initial entry' do
+ expect(parsing.chart[0].size).to eq(1)
+ initial_entry = parsing.initial_entry
+ parsing.start_rule(initial_entry, 0)

- expect(subject.chart[0].size).to eq(2)
- new_entry = subject.chart[0].last
+ expect(parsing.chart[0].size).to eq(2)
+ new_entry = parsing.chart[0].last
  expect(new_entry.vertex.label).to eq('S => . A')
- expect(subject.antecedence.fetch(new_entry)).to eq([initial_entry])
+ expect(parsing.antecedence.fetch(new_entry)).to eq([initial_entry])
  end

- it 'should apply the call rule correctly' do
- subject.start_rule(subject.initial_entry, 0)
+ it 'applies the call rule correctly' do
+ parsing.start_rule(parsing.initial_entry, 0)
  # A parse entry with vertex 'S => . A' was added...
- second_entry = subject.chart[0].last
- subject.call_rule(second_entry, 0)
+ second_entry = parsing.chart[0].last
+ parsing.call_rule(second_entry, 0)

- expect(subject.chart[0].size).to eq(3)
- new_entry = subject.chart[0].last
+ expect(parsing.chart[0].size).to eq(3)
+ new_entry = parsing.chart[0].last
  expect(new_entry.vertex.label).to eq('.A')
- expect(subject.antecedence.fetch(new_entry)).to eq([second_entry])
+ expect(parsing.antecedence.fetch(new_entry)).to eq([second_entry])
  end

- it 'should apply the start rule correctly' do
- subject.start_rule(subject.chart[0].first, 0)
- subject.call_rule(subject.chart[0].last, 0)
- expect(subject.chart[0].size).to eq(3)
+ it 'applies the start rule correctly' do
+ parsing.start_rule(parsing.chart[0].first, 0)
+ parsing.call_rule(parsing.chart[0].last, 0)
+ expect(parsing.chart[0].size).to eq(3)
  # Last entry is: (.A, 0)
- dot_A_entry = subject.chart[0].last
+ dot_A_entry = parsing.chart[0].last

- subject.start_rule(dot_A_entry, 0)
+ parsing.start_rule(dot_A_entry, 0)

  # Expectations: two entries:
  expected = ['A => . a A c', 'A => . b']
- expect(subject.chart[0].size).to eq(5)
- expect(subject.chart[0].pop.vertex.label).to eq(expected.last)
- fourth_entry = subject.chart[0].last
+ expect(parsing.chart[0].size).to eq(5)
+ expect(parsing.chart[0].pop.vertex.label).to eq(expected.last)
+ fourth_entry = parsing.chart[0].last
  expect(fourth_entry.vertex.label).to eq(expected.first)
- expect(subject.antecedence.fetch(fourth_entry)).to eq([dot_A_entry])
+ expect(parsing.antecedence.fetch(fourth_entry)).to eq([dot_A_entry])
  end

- it 'should apply the scan rule correctly' do
+ it 'applies the scan rule correctly' do
  # Filling manually first entry set...
  fill_first_set
  # There are two entries expecting a terminal:
  # ['A => . a A c', 'A => . b']
- fourth_entry = subject.chart[0].entries[3] # 'A => . a A c'
+ fourth_entry = parsing.chart[0].entries[3] # 'A => . a A c'

- expect(subject.chart.sets[1]).to be_nil
- subject.scan_rule(0, grm1_tokens[0])
+ expect(parsing.chart.sets[1]).to be_nil
+ parsing.scan_rule(0, grm1_tokens[0])
  # Given that the scanned token is 'a'...
  # Then a new entry is added in next entry set
- expect(subject.chart[1].size).to eq(1)
- last_entry = subject.chart[1].last
+ expect(parsing.chart[1].size).to eq(1)
+ last_entry = parsing.chart[1].last

  # Entry must be past the terminal symbol
  expect(last_entry.vertex.label).to eq('A => a . A c')
  expect(last_entry.origin).to eq(0)
- antecedence = subject.antecedence
+ antecedence = parsing.antecedence
  expect(antecedence.fetch(last_entry)).to eq([fourth_entry])
  end

- it 'should apply the exit rule correctly' do
+ it 'applies the exit rule correctly' do
  # Filling manually first entry set...
  fill_first_set

@@ -207,56 +207,56 @@ SNIPPET

  # Given that the scanned token is 'b'...
  # Then a new entry is added in next entry set
- expect(subject.chart[1].size).to eq(1)
- last_entry = subject.chart[1].last
+ expect(parsing.chart[1].size).to eq(1)
+ last_entry = parsing.chart[1].last

  # Entry must be past the terminal symbol
  expect(last_entry.vertex.label).to eq('A => b .')
  expect(last_entry.origin).to eq(0)

  # Apply exit rule...
- subject.exit_rule(last_entry, 1)
- expect(subject.chart[1].size).to eq(2)
- exit_entry = subject.chart[1].last
+ parsing.exit_rule(last_entry, 1)
+ expect(parsing.chart[1].size).to eq(2)
+ exit_entry = parsing.chart[1].last
  expect(exit_entry.vertex.label).to eq('A.')
  expect(exit_entry.origin).to eq(0)
- expect(subject.antecedence.fetch(exit_entry)).to eq([last_entry])
+ expect(parsing.antecedence.fetch(exit_entry)).to eq([last_entry])
  end

- it 'should apply the end rule correctly' do
+ it 'applies the end rule correctly' do
  # Filling manually first entry set...
  fill_first_set

  # Initial manually first entry set...
  seed_second_set
- last_entry = subject.chart[1].last
+ last_entry = parsing.chart[1].last

  # Given that the scanned token is 'b'...
  # New entry must be past the terminal symbol
  expect(last_entry.vertex.label).to eq('A => b .')

  # Apply exit rule...
- subject.exit_rule(last_entry, 1)
- expect(subject.chart[1].size).to eq(2)
- exit_entry = subject.chart[1].last
+ parsing.exit_rule(last_entry, 1)
+ expect(parsing.chart[1].size).to eq(2)
+ exit_entry = parsing.chart[1].last
  expect(exit_entry.vertex.label).to eq('A.')

  # ... Now the end rule
- subject.end_rule(subject.chart[1].last, 1)
- expect(subject.chart[1].size).to eq(3)
- end_entry = subject.chart[1].last
+ parsing.end_rule(parsing.chart[1].last, 1)
+ expect(parsing.chart[1].size).to eq(3)
+ end_entry = parsing.chart[1].last
  expect(end_entry.vertex.label).to eq('S => A .')
  expect(end_entry.origin).to eq(0)
- expect(subject.antecedence.fetch(end_entry)).to eq([exit_entry])
+ expect(parsing.antecedence.fetch(end_entry)).to eq([exit_entry])
  end

  =begin
- it 'should retrieve the parse states that expect a given terminal' do
+ it 'retrieves the parse states that expect a given terminal' do
  item1 = DottedItem.new(prod_A1, 2)
  item2 = DottedItem.new(prod_A1, 1)
- subject.push_state(item1, 2, 2, :scanning)
- subject.push_state(item2, 2, 2, :scanning)
- states = subject.states_expecting(c_, 2, false)
+ parsing.push_state(item1, 2, 2, :scanning)
+ parsing.push_state(item2, 2, 2, :scanning)
+ states = parsing.states_expecting(c_, 2, false)
  expect(states.size).to eq(1)
  expect(states[0].dotted_rule).to eq(item1)
  end
@@ -264,17 +264,17 @@ SNIPPET
  it 'should update the states upon token match' do
  # When a input token matches an expected terminal symbol
  # then new parse states must be pushed to the following chart slot
- expect(subject.chart[1]).to be_empty
+ expect(parsing.chart[1]).to be_empty

  item1 = DottedItem.new(prod_A1, 0)
  item2 = DottedItem.new(prod_A2, 0)
- subject.push_state(item1, 0, 0, :completion)
- subject.push_state(item2, 0, 0, :completion)
- subject.scanning(a_, 0) { |i| i } # Code block is mock
+ parsing.push_state(item1, 0, 0, :completion)
+ parsing.push_state(item2, 0, 0, :completion)
+ parsing.scanning(a_, 0) { |i| i } # Code block is mock

  # Expected side effect: a new state at chart[1]
- expect(subject.chart[1].size).to eq(1)
- new_state = subject.chart[1].states[0]
+ expect(parsing.chart[1].size).to eq(1)
+ new_state = parsing.chart[1].states[0]
  expect(new_state.dotted_rule).to eq(item1)
  expect(new_state.origin).to eq(0)
  end
@@ -282,6 +282,12 @@ SNIPPET
  end # context

  context 'Provided services:' do
+ subject(:parsing) do
+ parser = GFGEarleyParser.new(b_expr_grammar)
+ tokens = expr_tokenizer('2 + 3 * 4')
+ parser.parse(tokens)
+ end
+
  let(:b_expr_grammar) do
  builder = grammar_expr_builder
  builder.grammar
@@ -291,14 +297,10 @@ SNIPPET
  b_expr_grammar.name2symbol[aSymbolName]
  end

- subject do
- parser = GFGEarleyParser.new(b_expr_grammar)
- tokens = expr_tokenizer('2 + 3 * 4')
- parser.parse(tokens)
- end
+ # rubocop: disable Lint/BinaryOperatorWithIdenticalOperands

- it 'should give a text representation of itself' do
- repr = subject.to_s
+ it 'gives a text representation of itself' do
+ repr = parsing.to_s
  expect(repr).to match(/^success\? true/)

  # Let's test the last chart state only
@@ -315,11 +317,18 @@ State[5]
  S => S . + M | 0
  P. | 0
  REPR
- expect(expectation).to eq(expectation)
+ expect(expectation == expectation).to be(true)
  end
+ # rubocop: enable Lint/BinaryOperatorWithIdenticalOperands
  end # context

  context 'Parse forest building:' do
+ subject(:parsing) do
+ parser = GFGEarleyParser.new(b_expr_grammar)
+ tokens = expr_tokenizer('3 * 4')
+ parser.parse(tokens)
+ end
+
  let(:b_expr_grammar) do
  builder = grammar_expr_builder
  builder.grammar
@@ -329,14 +338,8 @@ REPR
  b_expr_grammar.name2symbol[aSymbolName]
  end

- subject do
- parser = GFGEarleyParser.new(b_expr_grammar)
- tokens = expr_tokenizer('3 * 4')
- parser.parse(tokens)
- end
-
- it 'should indicate whether a parse succeeded' do
- expect(subject.success?).to be_truthy
+ it 'indicates whether a parse succeeded' do
+ expect(parsing).to be_success
  end
  end # context
  end # describe
data/spec/rley/parser/parse_entry_set_spec.rb

@@ -19,6 +19,8 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  return Syntax::Production.new(theLHS, theRHSSymbols)
  end

+ subject(:a_set) { described_class.new }
+
  let(:t_a) { Rley::Syntax::Terminal.new('a') }
  let(:t_b) { Rley::Syntax::Terminal.new('b') }
  let(:t_c) { Rley::Syntax::Terminal.new('c') }
@@ -39,153 +41,153 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  let(:entry3) { ParseEntry.new(vertex3, 4) }

  context 'Initialization:' do
- it 'should be created without argument' do
- expect { ParseEntrySet.new }.not_to raise_error
+ it 'is created without argument' do
+ expect { described_class.new }.not_to raise_error
  end

- it 'should be empty after creation' do
- expect(subject.entries).to be_empty
+ it 'is empty after creation' do
+ expect(a_set.entries).to be_empty
  end
  end # context

  context 'Provided services:' do
- it 'should accept the addition of an entry' do
+ it 'accepts the addition of an entry' do
  # Case: first time entry addition
- expect(subject.push_entry(entry1)).to eq(entry1)
- expect(subject).not_to be_empty
+ expect(a_set.push_entry(entry1)).to eq(entry1)
+ expect(a_set).not_to be_empty

  # Case: duplicate entry
- expect(subject.push_entry(entry1)).to eq(entry1)
+ expect(a_set.push_entry(entry1)).to eq(entry1)

  # Yet another entry
- expect(subject.push_entry(entry2)).to eq(entry2)
- expect(subject.entries).to eq([entry1, entry2])
+ expect(a_set.push_entry(entry2)).to eq(entry2)
+ expect(a_set.entries).to eq([entry1, entry2])
  end

- it 'should retrieve the entry at given position' do
- subject.push_entry(entry1)
- subject.push_entry(entry2)
- expect(subject[0]).to eq(entry1)
- expect(subject[1]).to eq(entry2)
+ it 'retrieves the entry at given position' do
+ a_set.push_entry(entry1)
+ a_set.push_entry(entry2)
+ expect(a_set[0]).to eq(entry1)
+ expect(a_set[1]).to eq(entry2)
  end

- it 'should list the entries expecting a given terminal' do
+ it 'lists the entries expecting a given terminal' do
  # Case: an entry expecting a terminal
- subject.push_entry(entry1)
- expect(subject.entries4term(t_b)).to eq([entry1])
+ a_set.push_entry(entry1)
+ expect(a_set.entries4term(t_b)).to eq([entry1])

  # Case: a second entry expecting same terminal
- subject.push_entry(entry2)
- expect(subject.entries4term(t_b)).to eq([entry1, entry2])
+ a_set.push_entry(entry2)
+ expect(a_set.entries4term(t_b)).to eq([entry1, entry2])
  end

- it 'should list the expected terminals' do
- subject.push_entry(entry1)
- subject.push_entry(entry2)
- subject.push_entry(entry3)
+ it 'lists the expected terminals' do
+ a_set.push_entry(entry1)
+ a_set.push_entry(entry2)
+ a_set.push_entry(entry3)

- expect(subject.expected_terminals).to eq([t_b])
+ expect(a_set.expected_terminals).to eq([t_b])
  end

- it 'should list the entries expecting a given non-terminal' do
+ it 'lists the entries expecting a given non-terminal' do
  # Case: an entry expecting a non-terminal
- subject.push_entry(entry3)
- expect(subject.entries4n_term(nt_rep_c)).to eq([entry3])
+ a_set.push_entry(entry3)
+ expect(a_set.entries4n_term(nt_rep_c)).to eq([entry3])
  end

- it 'should provide human-readable representation of itself' do
+ it 'provides human-readable representation of itself' do
  # Case 1: empty set
  pattern_empty = /^#<Rley::Parser::ParseEntrySet:\d+ @entries=\[\]>$/
- expect(subject.inspect).to match(pattern_empty)
+ expect(a_set.inspect).to match(pattern_empty)

  # Case 2: non-empty set
- subject.push_entry(entry1)
+ a_set.push_entry(entry1)
  prefix = /^#<Rley::Parser::ParseEntrySet:\d+ @entries=\[#<Rley/
- expect(subject.inspect).to match(prefix)
+ expect(a_set.inspect).to match(prefix)
  pattern_entry = /ParseEntry:\d+ @vertex=<Rley::GFG::ItemVertex:\d+/
- expect(subject.inspect).to match(pattern_entry)
+ expect(a_set.inspect).to match(pattern_entry)
  suffix = /=> a \. b b Repetition> @origin=2 @antecedents=\[\]>\]>$/
- expect(subject.inspect).to match(suffix)
+ expect(a_set.inspect).to match(suffix)
  end

  =begin
- it 'should list of ambiguous states' do
+ it 'lists of ambiguous states' do
  prod1 = double('fake-production1')
  prod2 = double('fake-production2')
- expect(subject.ambiguities.size).to eq(0)
+ expect(a_set.ambiguities.size).to eq(0)

  # Adding states
- subject.push_entry(entry1)
+ a_set.push_entry(entry1)
  expect(vertex1).to receive(:production).and_return(prod1)
  expect(vertex1).to receive(:"reduce_item?").and_return(true)
  expect(vertex1).to receive(:lhs).and_return(:something)
- expect(subject.ambiguities.size).to eq(0)
+ expect(a_set.ambiguities.size).to eq(0)
  expect(vertex2).to receive(:production).and_return(prod2)
  expect(vertex2).to receive(:"reduce_item?").and_return(true)
  expect(vertex2).to receive(:lhs).and_return(:something_else)
- subject.push_entry(entry2)
- expect(subject.ambiguities.size).to eq(0)
+ a_set.push_entry(entry2)
+ expect(a_set.ambiguities.size).to eq(0)
  # dotted_rule3 = double('fake_dotted_rule3')
  # expect(dotted_rule3).to receive(:production).and_return(prod2)
  # expect(dotted_rule3).to receive(:"reduce_item?").and_return(true)
  # expect(dotted_rule3).to receive(:lhs).and_return(:something_else)
  # entry3 = ParseEntry.new(dotted_rule3, 5)
- subject.push_entry(entry3)
- expect(subject.ambiguities[0]).to eq([entry2, entry3])
+ a_set.push_entry(entry3)
+ expect(a_set.ambiguities[0]).to eq([entry2, entry3])
  end
  =end
  =begin
- it 'should list the states expecting a given terminal' do
+ it 'lists the states expecting a given terminal' do
  # Case of no state
- expect(subject.states_expecting(:a)).to be_empty
+ expect(a_set.states_expecting(:a)).to be_empty

  # Adding states
- subject.push_entry(entry1)
- subject.push_entry(entry2)
+ a_set.push_entry(entry1)
+ a_set.push_entry(entry2)
  expect(vertex1).to receive(:next_symbol).and_return(:b)
  expect(vertex2).to receive(:next_symbol).and_return(:a)
- expect(subject.states_expecting(:a)).to eq([entry2])
- expect(subject.states_expecting(:b)).to eq([entry1])
+ expect(a_set.states_expecting(:a)).to eq([entry2])
+ expect(a_set.states_expecting(:b)).to eq([entry1])
  end

- it 'should list the states related to a production' do
+ it 'lists the states related to a production' do
  a_prod = double('fake-production')

  # Case of no state
- expect(subject.states_for(a_prod)).to be_empty
+ expect(a_set.states_for(a_prod)).to be_empty

  # Adding states
- subject.push_entry(entry1)
- subject.push_entry(entry2)
+ a_set.push_entry(entry1)
+ a_set.push_entry(entry2)
  expect(vertex1).to receive(:production).and_return(:dummy)
  expect(vertex2).to receive(:production).and_return(a_prod)
- expect(subject.states_for(a_prod)).to eq([entry2])
+ expect(a_set.states_for(a_prod)).to eq([entry2])
  end

- it 'should list the states that rewrite a given non-terminal' do
+ it 'lists the states that rewrite a given non-terminal' do
  non_term = double('fake-non-terminal')
  prod1 = double('fake-production1')
  prod2 = double('fake-production2')

  # Adding states
- subject.push_entry(entry1)
- subject.push_entry(entry2)
+ a_set.push_entry(entry1)
+ a_set.push_entry(entry2)
  expect(vertex1).to receive(:production).and_return(prod1)
  expect(prod1).to receive(:lhs).and_return(:dummy)
  expect(vertex2).to receive(:production).and_return(prod2)
  expect(vertex2).to receive(:reduce_item?).and_return(true)
  expect(prod2).to receive(:lhs).and_return(non_term)
- expect(subject.states_rewriting(non_term)).to eq([entry2])
+ expect(a_set.states_rewriting(non_term)).to eq([entry2])
  end



- it 'should complain when impossible predecessor of parse state' do
- subject.push_entry(entry1)
- subject.push_entry(entry2)
+ it 'complains when impossible predecessor of parse state' do
+ a_set.push_entry(entry1)
+ a_set.push_entry(entry2)
  expect(vertex1).to receive(:prev_position).and_return(nil)
  err = StandardError
- expect { subject.predecessor_state(entry1) }.to raise_error(err)
+ expect { a_set.predecessor_state(entry1) }.to raise_error(err)
  end
  =end
  end # context
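
The spec hunks above all apply the same RSpec conventions favoured by RuboCop's RSpec cops: a named subject (subject(:parsing), subject(:a_set)) instead of the implicit subject, described_class instead of repeating the class name, present-tense example descriptions instead of "should ...", and matchers such as be_a and be_success in place of be_kind_of and success?/be_truthy. A minimal, self-contained sketch of that style follows; the Stack class is hypothetical and not part of Rley, it only gives the examples something to describe.

# frozen_string_literal: true

require 'rspec'

# Hypothetical class used solely to illustrate the spec style; not part of Rley.
class Stack
  def initialize
    @items = []
  end

  def push(item)
    @items.push(item)
  end

  def size
    @items.size
  end

  def empty?
    @items.empty?
  end
end

RSpec.describe Stack do
  # Named subject: examples refer to `a_stack` rather than the implicit `subject`,
  # and `described_class` avoids repeating the class name.
  subject(:a_stack) { described_class.new }

  context 'Initialization:' do
    # Present-tense description instead of 'should be empty after creation'.
    it 'is empty after creation' do
      expect(a_stack).to be_empty
    end
  end

  context 'Provided services:' do
    it 'accepts the addition of an item' do
      a_stack.push(42)
      expect(a_stack.size).to eq(1)
    end
  end
end

Run with the rspec command; the named subject keeps the examples readable when several contexts define differently configured subjects, as the GFGParsing specs above do.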