rley 0.7.04 → 0.7.05

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 9fa143547cbc9df1103e8d5fa196a77fb1dde67edc118f450ba34e2d7338be18
- data.tar.gz: 3a21aa20217d4bf09b9816229b5d5a75a804a8f41c0bb14fc8572f5119f1140b
+ metadata.gz: 6bac50f391e9fa77fd20e283cea1d15ec7729ba7663f022e0b7c2eaf3dfa4d2d
+ data.tar.gz: e85f49f87c86c2397a4ebf05b53b9fe4d7fc55ea77829892575c588424f2c789
  SHA512:
- metadata.gz: 2bd17209dd119cf971f3ccf9f8cafb319955add4714f54d6d3a2baa542fc5a528942d71276ab9110081d1eed6d48d39bfec0f435ef55c47eb0d12dd1aa5b2801
- data.tar.gz: 500b641afa30d01ba6f28ef8fe23d9f157db67985f66fc95a6a07bdff099946a2faeffb09bdd2651a7871edfa756799c9e5768bb3010a8528d8f4a758f5933e3
+ metadata.gz: 0443dcd4ee4c92def8f3ec0d81418f62344ae66d4839b6449e336953cbb34c31209b09389036f7c0a64ae53b04f01e132f0eef44ec16069446c86b82056996af
+ data.tar.gz: 7e078d135b134924794118386e25fed879e2d85862160e06708b711db15d01560b4ab2562489630125ee395324e5430b3b8354652c139238cc240ff8218bc780
@@ -1,3 +1,6 @@
+ ### 0.7.05 / 2019-11-17
+ - [FIX] Method `GFGParsing#nullable_rule`: fixed an issue with nullable productions that have at least one symbol in their rhs.
+
  ### 0.7.04 / 2019-08-17
  - Rley recognizer is about 25% faster than previous version. Kudos to the people
  behind the *magic_frozen_string_literal* gem.
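
For context on the [FIX] entry above: the specs added in this release (see the engine spec hunks below) reproduce the problem with a grammar whose `BC` nonterminal is nullable even though its right-hand side is not empty. A minimal sketch of that scenario, reusing the grammar from those specs (`ABCTokenizer` is a spec helper, not part of the gem's public API):

```ruby
# Sketch based on the 0.7.05 specs: BC is nullable (both B_opt and C_opt can
# derive the empty string) yet its rhs has two members -- the case that used
# to trip GFGParsing#nullable_rule.
require 'rley'

engine = Rley::Engine.new
engine.build_grammar do
  add_terminals('a', 'b', 'c')
  add_production 'S' => 'A BC'
  add_production 'A' => 'a'
  add_production 'BC' => 'B_opt C_opt'
  add_production 'B_opt' => 'b'
  add_production 'B_opt' => []  # empty alternative
  add_production 'C_opt' => 'c'
  add_production 'C_opt' => []  # empty alternative
end

# ABCTokenizer is borrowed from the specs; any tokenizer yielding Rley tokens works.
result = engine.parse(ABCTokenizer.new('a'))
engine.to_ptree(result)  # used to fail on 0.7.04 for this kind of input; succeeds with the fix
```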
@@ -11,10 +11,6 @@ environment:
  - Ruby_version: 24
  - Ruby_version: 23

- # These are failing
- #
- #
-
  install:
  - set PATH=C:\Ruby%Ruby_version%\bin;%PATH%
  - gem update --system
@@ -5,7 +5,7 @@

  module Rley # Module used as a namespace
  # The version number of the gem.
- Version = '0.7.04'
+ Version = '0.7.05'

  # Brief description of the gem.
  Description = "Ruby implementation of the Earley's parsing algorithm"
@@ -25,7 +25,7 @@ module Rley # This module is used as a namespace
  # @return [String] Input substring that is an instance of the terminal.
  attr_reader(:lexeme)

- # @return [Syntax::Terminal] Terminal symbol corresponding to the lexeme.
+ # @return [String] The name of the terminal symbol matching the lexeme.
  attr_reader(:terminal)

  # @return [Position] The position of the lexeme in the source file.
@@ -42,7 +42,9 @@ module Rley # This module is used as a namespace
  # @param theChildren [Array] Children nodes (one per rhs symbol)
  def new_parent_node(aProduction, aRange, _tokens, theChildren)
  node = Rley::PTree::NonTerminalNode.new(aProduction.lhs, aRange)
- theChildren.reverse_each { |child| node.add_subnode(child) }
+ if theChildren
+ theChildren.reverse_each { |child| node.add_subnode(child) }
+ end
  return node
  end
  end # class
@@ -89,6 +89,7 @@ module Rley # This module is used as a namespace
  when :backtrack
  # Restore path
  @curr_path = entry2path_to_alt[anEntry].dup
+ raise StandardError, "path is nil for #{anEntry}" if curr_path.nil?

  when :revisit
  # Retrieve the already existing node corresponding
@@ -151,6 +152,7 @@ module Rley # This module is used as a namespace
  when :backtrack
  # Restore path
  @curr_path = entry2path_to_alt[anEntry].dup
+ raise StandardError, 'path is nil' if curr_path.nil?
  create_alternative_node(anEntry)

  when :revisit
@@ -32,6 +32,11 @@ module Rley # This module is used as a namespace
  end
  rescue StopIteration
  # Do nothing: gobble the exception
+ rescue StandardError => exc
+ if exc.message =~ /^Ambiguous/
+ $stderr.puts parsing
+ end
+ raise exc
  end

  a_builder.done!
@@ -57,8 +57,8 @@ module Rley # This module is used as a namespace
  if anIndex == sets.size
  err_msg = "Internal error: unexpected push reason #{reason}"
  raise StandardError, err_msg if reason != :scan_rule
-
- add_entry_set
+
+ add_entry_set
  end
  pushed = self[anIndex].push_entry(new_entry)

@@ -93,7 +93,39 @@ module Rley # This module is used as a namespace
  entry.origin.zero? && entry.vertex.non_terminal == start_symbol
  end

- return success_entries.first
+ success_entries.first
+ end
+
+ # @return [Integer] The number of states.
+ def count_states
+ sets.size
+ end
+
+ # @return [Integer] The total number of entries.
+ def count_entries
+ sets.reduce(0) do |sub_result, a_set|
+ sub_result += a_set.size
+ end
+ end
+
+ # @return [Integer] The total number of edges.
+ def count_edges
+ sets.reduce(0) do |sub_result, a_set|
+ sub_result += a_set.count_edges
+ end
+ end
+
+ # @return [String] A human-readable representation of the chart.
+ def to_s
+ result = +''
+ sets.each_with_index do |a_set, i|
+ result << "State[#{i}]\n"
+ a_set.entries.each do |item|
+ result << ' ' + item.to_s + "\n"
+ end
+ end
+
+ result
  end

  private
@@ -15,7 +15,7 @@ module Rley # This module is used as a namespace
  attr_reader(:gf_graph)

  # The link to the chart object
- # @return [GFGChart]
+ # @return [Parser::GFGChart]
  attr_reader(:chart)

  # The sequence of input tokens to parse
@@ -74,6 +74,11 @@ module Rley # This module is used as a namespace
  # [B., i]
  # [A => alpha B . beta, k]
  def nullable_rule(anEntry, aPosition)
+ # Terminology:
+ # .B : start node
+ # B => . rhs : entry node
+ # B => rhs . : exit node
+ # B. : end node
  next_symbol = anEntry.next_symbol
  pos = aPosition
  start = gf_graph.start_vertex_for[next_symbol]
@@ -85,6 +90,7 @@ module Rley # This module is used as a namespace
  succ = edge.successor # succ always an ItemVertex
  if succ.dotted_item.production.nullable?
  succ_entry = apply_rule(start_entry, succ, pos, pos, :nullable_rule)
+ next unless succ_entry.exit_entry?
  apply_rule(succ_entry, end_vertex, pos, pos, :nullable_rule)
  end
  end
@@ -179,7 +185,7 @@ module Rley # This module is used as a namespace
  # followed the syntax specified by the grammar)
  def success?()
  return false if @failure_reason
-
+
  return chart.accepting_entry ? true : false
  end

@@ -239,6 +245,27 @@ END_MSG
  end
  end

+ def count_states
+ chart.count_states
+ end
+
+ def count_entries
+ chart.count_entries
+ end
+
+ def count_edges
+ chart.count_edges
+ end
+
+ # @return [String] A human readable representation of itself.
+ def to_s()
+ result = +''
+ result << "success? #{success?}\n"
+ result << "chart:\n"
+ result << chart.to_s
+
+ result
+ end

  private

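
The counters and `to_s` added to GFGParsing above simply delegate to the chart. A rough usage sketch, mirroring the expression-grammar specs further down (`b_expr_grammar` and `expr_tokenizer` are spec helpers, assumed here):

```ruby
# Sketch only: `result` is the GFGParsing object returned by GFGEarleyParser#parse,
# as in the specs below.
parser = GFGEarleyParser.new(b_expr_grammar)
result = parser.parse(expr_tokenizer('2 + 3 * 4'))

result.count_states   # number of entry sets (chart states)
result.count_entries  # total number of parse entries across all states
result.count_edges    # total number of edges from the entries' vertices
puts result           # to_s: "success? true" followed by a State[i] dump of the chart
```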
@@ -273,7 +300,7 @@ END_MSG
  =begin
  # Invariant checks
  antecedents = antecedence[consequent]
-
+
  case aVertex
  when Rley::GFG::EndVertex
  # Rule: has 1..* antecedents, all of them are exit items
@@ -44,6 +44,7 @@ module Rley # This module is used as a namespace
  end

  # Add a link to an antecedent parse entry
+ # @param anAntecedent [ParseEntry]
  def add_antecedent(anAntecedent)
  antecedents << anAntecedent unless antecedents.include?(anAntecedent)
  end
@@ -98,6 +98,12 @@ module Rley # This module is used as a namespace
  def expected_terminals()
  return @entries4term.keys
  end
+
+ def count_edges
+ entries.reduce(0) do |sub_result, entry|
+ sub_result += entry.vertex.edges.size
+ end
+ end

  private

@@ -176,7 +176,7 @@ module Rley # This module is used as a namespace
  select_antecedent(aContext)
  end

- return entries
+ entries
  end

  # Handle the case of an entry having one antecedent only
@@ -213,6 +213,7 @@ module Rley # This module is used as a namespace
  case aContext.curr_entry.vertex
  when GFG::EndVertex
  # puts "Add backtrack point stack #{aContext.curr_entry}"
+ # aContext.curr_entry.antecedents.each { |antec| puts "\t#{antec}" }
  bp = add_backtrack_point(aContext)
  new_entry = bp.visitee.antecedents[bp.antecedent_index]

@@ -101,6 +101,23 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  sample_text = 'a a b c c'
  ABCTokenizer.new(sample_text)
  end
+
+ it 'should build a parse tree even for a nullable production' do
+ instance = Engine.new
+ instance.build_grammar do
+ add_terminals('a', 'b', 'c')
+ add_production 'S' => 'A BC'
+ add_production 'A' => 'a'
+ add_production 'BC' => 'B_opt C_opt'
+ add_production 'B_opt' => 'b'
+ add_production 'B_opt' => []
+ add_production 'C_opt' => 'c'
+ add_production 'C_opt' => []
+ end
+ input = ABCTokenizer.new('a')
+ raw_result = instance.parse(input)
+ expect { instance.to_ptree(raw_result) }.not_to raise_error
+ end

  it 'should build default parse trees' do
  raw_result = subject.parse(sample_tokenizer)
@@ -136,6 +153,23 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  ABCTokenizer.new(sample_text)
  end

+ it 'should build a parse forest even for a nullable production' do
+ instance = Engine.new
+ instance.build_grammar do
+ add_terminals('a', 'b', 'c')
+ add_production 'S' => 'A BC'
+ add_production 'A' => 'a'
+ add_production 'BC' => 'B_opt C_opt'
+ add_production 'B_opt' => 'b'
+ add_production 'B_opt' => []
+ add_production 'C_opt' => 'c'
+ add_production 'C_opt' => []
+ end
+ input = ABCTokenizer.new('a')
+ raw_result = instance.parse(input)
+ expect { instance.to_pforest(raw_result) }.not_to raise_error
+ end
+
  it 'should build parse forest' do
  raw_result = subject.parse(sample_tokenizer)
  pforest = subject.to_pforest(raw_result)
@@ -77,13 +77,13 @@ module Rley # Open this namespace to avoid module qualifier prefixes

  context 'Provided services:' do
  it 'should accept the pushing of a parse entry in existing set' do
- expect(subject.sets[0].entries.size).to eq(1)
+ expect(subject.sets[0].entries.size).to eq(1)
  subject.push_entry(second_vertex, 0, 0, :scan_rule)
  expect(subject.sets[0].entries.size).to eq(2)
  end
-
+
  it 'should accept the pushing of a parse entry in new set' do
- expect(subject.sets[0].entries.size).to eq(1)
+ expect(subject.sets[0].entries.size).to eq(1)
  subject.push_entry(second_vertex, 0, 1, :scan_rule)
  expect(subject.sets[0].entries.size).to eq(1)
  expect(subject.sets.size).to eq(2)
@@ -92,7 +92,18 @@ module Rley # Open this namespace to avoid module qualifier prefixes

  it 'should retrieve an existing set at given position' do
  expect(subject[0]).to eq(subject.sets[0])
- end
+ end
+
+ it 'should give a user-friendly text representation of itself' do
+ subject.push_entry(second_vertex, 0, 1, :scan_rule)
+ representation = <<REPR
+ State[0]
+ .S | 0
+ State[1]
+ S => . A | 0
+ REPR
+ expect(subject.to_s).to eq(representation)
+ end
  end # context
  end # describe
  end # module
@@ -286,10 +286,11 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  # A => ;
  t_x = Syntax::VerbatimSymbol.new('x')

- builder = Syntax::GrammarBuilder.new
- builder.add_terminals(t_x)
- builder.add_production('Ss' => %w[A A x])
- builder.add_production('A' => [])
+ builder = Syntax::GrammarBuilder.new do
+ add_terminals(t_x)
+ rule 'Ss' => 'A A x'
+ rule 'A' => []
+ end
  pos = Lexical::Position.new(1, 1)
  tokens = [Lexical::Token.new('x', t_x, pos)]

@@ -27,7 +27,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  include GrammarABCHelper # Mix-in module with builder for grammar abc
  include GrammarBExprHelper # Mix-in with builder for simple expressions
  include GrammarHelper # Mix-in with method for creating token sequence
-
+
  # Helper method. Create an array of dotted items
  # from the given grammar
  def build_items_for_grammar(aGrammar)
@@ -166,7 +166,7 @@ SNIPPET
  expect(subject.chart[0].size).to eq(3)
  # Last entry is: (.A, 0)
  dot_A_entry = subject.chart[0].last
-
+
  subject.start_rule(dot_A_entry, 0)

  # Expectations: two entries:
@@ -196,7 +196,7 @@ SNIPPET
  expect(last_entry.vertex.label).to eq('A => a . A c')
  expect(last_entry.origin).to eq(0)
  antecedence = subject.antecedence
- expect(antecedence.fetch(last_entry)).to eq([fourth_entry])
+ expect(antecedence.fetch(last_entry)).to eq([fourth_entry])
  end

  it 'should apply the exit rule correctly' do
@@ -221,7 +221,7 @@ SNIPPET
  exit_entry = subject.chart[1].last
  expect(exit_entry.vertex.label).to eq('A.')
  expect(exit_entry.origin).to eq(0)
- expect(subject.antecedence.fetch(exit_entry)).to eq([last_entry])
+ expect(subject.antecedence.fetch(exit_entry)).to eq([last_entry])
  end

  it 'should apply the end rule correctly' do
@@ -250,6 +250,7 @@ SNIPPET
  expect(end_entry.origin).to eq(0)
  expect(subject.antecedence.fetch(end_entry)).to eq([exit_entry])
  end
+
  =begin


@@ -284,18 +285,7 @@ SNIPPET
  =end
  end # context

- context 'Parse forest building:' do
- let(:sample_grammar1) do
- builder = grammar_abc_builder
- builder.grammar
- end
-
- let(:token_seq1) do
- %w[a a b c c].map do |letter|
- Lexical::Token.new(letter, sample_grammar1.name2symbol[letter])
- end
- end
-
+ context 'Provided services:' do
  let(:b_expr_grammar) do
  builder = grammar_expr_builder
  builder.grammar
@@ -310,247 +300,51 @@ SNIPPET
  tokens = expr_tokenizer('2 + 3 * 4')
  parser.parse(tokens)
  end
-
- it 'should indicate whether a parse succeeded' do
- expect(subject.success?).to be_truthy
- end

- it 'should build a parse forest' do
- expect { subject.parse_forest }.not_to raise_error if subject.success?
- end
- =begin
- it 'should create the root of a parse forest' do
- (entry_tracker, builder) = prepare_parse_forest(subject)
- # The root node should correspond to the start symbol and
- # its direct children should correspond to rhs of start production
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- SNIPPET
- root_text = builder.root.to_string(0)
- expect(root_text).to eq(expected_text.chomp)
-
- expect(entry_tracker.entry_set_index).to eq(subject.tokens.size)
- expected_entry = 'P => S . | 0'
- expect(entry_tracker.parse_entry.to_s).to eq(expected_entry)
- expect(builder.current_node.to_string(0)).to eq('S[0, 5]')
- end
- =end
- =begin
- it 'should use a reduce item for a matched non-terminal' do
- # Setup
- (entry_tracker, builder) = prepare_parse_tree(subject)
- # Same entry as in previous example
-
- # Given matched symbol is S[0, 5]
- # And its reduce item is S => S + M . | 0
- # Then add child nodes corresponding to the rhs symbols
- # And make M[?, 5] the current symbol
- subject.insert_matched_symbol(entry_tracker, builder)
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- +- S[0, ?]
- +- +[?, ?]: '(nil)'
- +- M[?, 5]
- SNIPPET
- root_text = builder.root.to_string(0)
- expect(root_text).to eq(expected_text.chomp)
- expected_entry = 'S => S + M . | 0'
- expect(entry_tracker.parse_entry.to_s).to eq(expected_entry)
- expect(entry_tracker.entry_set_index).to eq(5)
- expect(builder.current_node.to_string(0)).to eq('M[?, 5]')
-
- # Second similar test
-
- # Given matched symbol is M[?, 5]
- # And its reduce item is M => M * T . | 2
- # Then add child nodes corresponding to the rhs symbols
- # And make T[?, 5] the current symbol
- subject.insert_matched_symbol(entry_tracker, builder)
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- +- S[0, ?]
- +- +[?, ?]: '(nil)'
- +- M[2, 5]
- +- M[2, ?]
- +- *[?, ?]: '(nil)'
- +- T[?, 5]
- SNIPPET
- root_text = builder.root.to_string(0)
- expect(root_text).to eq(expected_text.chomp)
- expected_entry = 'M => M * T . | 2'
- expect(entry_tracker.parse_entry.to_s).to eq(expected_entry)
- expect(entry_tracker.entry_set_index).to eq(5)
- expect(builder.current_node.to_string(0)).to eq('T[?, 5]')
+ it 'should give a text representation of itself' do
+ repr = subject.to_s
+ expect(repr).to match /^success\? true/
+
+ # Let's test the last chart state only
+ expectation = <<REPR
+ State[5]
+ T => integer . | 4
+ T. | 4
+ M => M * T . | 2
+ M. | 2
+ S => S + M . | 0
+ M => M . * T | 2
+ S. | 0
+ P => S . | 0
+ S => S . + M | 0
+ P. | 0
+ REPR
  end
+ end # context

-
-
- it 'should use a previous item for a terminal symbol' do
- # Setup
- (entry_tracker, builder) = prepare_parse_tree(subject)
- 3.times do
- subject.insert_matched_symbol(entry_tracker, builder)
- end
-
- # Given matched symbol is T[?, 5]
- # And its reduce item is T => integer . | 4
- # Then add child node corresponding to the rhs symbol
- # And make integer[4, 5]: '(nil)' the current symbol
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- +- S[0, ?]
- +- +[?, ?]: '(nil)'
- +- M[2, 5]
- +- M[2, ?]
- +- *[?, ?]: '(nil)'
- +- T[4, 5]
- +- integer[4, 5]: '(nil)'
- SNIPPET
- root_text = builder.root.to_string(0)
- expect(root_text).to eq(expected_text.chomp)
- expected_entry = 'T => integer . | 4'
- expect(entry_tracker.parse_entry.to_s).to eq(expected_entry)
- expect(entry_tracker.entry_set_index).to eq(5)
- integer_repr = "integer[4, 5]: '(nil)'"
- expect(builder.current_node.to_string(0)).to eq(integer_repr)
-
- # Given current tree symbol is integer[4, 5]: '(nil)'
- # And its previous item is T => . integer | 4
- # Then attach the token to the terminal node
- # And decrement the entry index by one
- # Make *[?, ?]: '(nil)' the current symbol
- subject.insert_matched_symbol(entry_tracker, builder)
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- +- S[0, ?]
- +- +[?, ?]: '(nil)'
- +- M[2, 5]
- +- M[2, ?]
- +- *[?, ?]: '(nil)'
- +- T[4, 5]
- +- integer[4, 5]: '4'
- SNIPPET
- root_text = builder.root.to_string(0)
- expect(root_text).to eq(expected_text.chomp)
- expected_entry = 'T => . integer | 4'
- expect(entry_tracker.parse_entry.to_s).to eq(expected_entry)
- expect(entry_tracker.entry_set_index).to eq(4)
- next_symbol = "*[?, ?]: '(nil)'"
- expect(builder.current_node.to_string(0)).to eq(next_symbol)
+ context 'Parse forest building:' do
+ let(:b_expr_grammar) do
+ builder = grammar_expr_builder
+ builder.grammar
  end

- it 'should handle [no symbol before dot, terminal tree node] case' do
- # Setup
- (entry_tracker, builder) = prepare_parse_tree(subject)
- 4.times do
- subject.insert_matched_symbol(entry_tracker, builder)
- end
-
- # Given current tree symbol is *[?, ?]: '(nil)'
- # And current dotted item is T => . integer | 4
- # When one retrieves the parse entry expecting the T
- # Then new parse entry is changed to: M => M * . T | 2
- subject.insert_matched_symbol(entry_tracker, builder)
-
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- +- S[0, ?]
- +- +[?, ?]: '(nil)'
- +- M[2, 5]
- +- M[2, ?]
- +- *[?, ?]: '(nil)'
- +- T[4, 5]
- +- integer[4, 5]: '4'
- SNIPPET
- root_text = builder.root.to_string(0)
- expect(root_text).to eq(expected_text.chomp)
- expected_entry = 'M => M * . T | 2'
- expect(entry_tracker.parse_entry.to_s).to eq(expected_entry)
- expect(entry_tracker.entry_set_index).to eq(4)
- next_symbol = "*[?, ?]: '(nil)'"
- expect(builder.current_node.to_string(0)).to eq(next_symbol)
-
- subject.insert_matched_symbol(entry_tracker, builder)
- next_symbol = 'M[2, ?]'
- expect(builder.current_node.to_string(0)).to eq(next_symbol)
+ def grm_symbol(aSymbolName)
+ b_expr_grammar.name2symbol[aSymbolName]
  end

- it 'should handle the end of parse tree generation' do
- # Begin setup
- is_done = false
- (entry_tracker, builder) = prepare_parse_tree(subject)
- 16.times do
- is_done = subject.insert_matched_symbol(entry_tracker, builder)
- end
-
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- +- S[0, 1]
- +- M[0, 1]
- +- T[0, 1]
- +- integer[0, 1]: '2'
- +- +[1, 2]: '+'
- +- M[2, 5]
- +- M[2, 3]
- +- T[2, 3]
- +- integer[2, 3]: '3'
- +- *[3, 4]: '*'
- +- T[4, 5]
- +- integer[4, 5]: '4'
- SNIPPET
- root_text = builder.root.to_string(0)
- expect(root_text).to eq(expected_text.chomp)
-
- expected_entry = 'T => . integer | 0'
- expect(entry_tracker.parse_entry.to_s).to eq(expected_entry)
- expect(entry_tracker.entry_set_index).to eq(0)
- expect(is_done).to eq(true)
+ subject do
+ parser = GFGEarleyParser.new(b_expr_grammar)
+ tokens = expr_tokenizer('3 * 4')
+ parser.parse(tokens)
  end

-
-
- it 'should build the parse tree for a simple non-ambiguous grammar' do
- parser = EarleyParser.new(sample_grammar1)
- instance = parser.parse(token_seq1)
- ptree = instance.parse_tree
- expect(ptree).to be_kind_of(PTree::ParseTree)
+ it 'should indicate whether a parse succeeded' do
+ expect(subject.success?).to be_truthy
  end

- it 'should build the parse tree for a simple expression grammar' do
- parser = EarleyParser.new(b_expr_grammar)
- tokens = expr_tokenizer('2 + 3 * 4', b_expr_grammar)
- instance = parser.parse(tokens)
- ptree = instance.parse_tree
- expect(ptree).to be_kind_of(PTree::ParseTree)
-
- # Expect parse tree:
- expected_text = <<-SNIPPET
- P[0, 5]
- +- S[0, 5]
- +- S[0, 1]
- +- M[0, 1]
- +- T[0, 1]
- +- integer[0, 1]: '2'
- +- +[1, 2]: '+'
- +- M[2, 5]
- +- M[2, 3]
- +- T[2, 3]
- +- integer[2, 3]: '3'
- +- *[3, 4]: '*'
- +- T[4, 5]
- +- integer[4, 5]: '4'
- SNIPPET
- actual = ptree.root.to_string(0)
- expect(actual).to eq(expected_text.chomp)
+ it 'should build a parse forest' do
+ expect { subject.parse_forest }.not_to raise_error if subject.success?
  end
- =end
  end # context
  end # describe
  end # module
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rley
  version: !ruby/object:Gem::Version
- version: 0.7.04
+ version: 0.7.05
  platform: ruby
  authors:
  - Dimitri Geshef
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-08-17 00:00:00.000000000 Z
+ date: 2019-11-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: coveralls