rley 0.7.03 → 0.7.08

Files changed (174)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +362 -62
  3. data/.travis.yml +6 -7
  4. data/CHANGELOG.md +20 -1
  5. data/LICENSE.txt +1 -1
  6. data/README.md +6 -7
  7. data/Rakefile +2 -0
  8. data/appveyor.yml +2 -4
  9. data/examples/NLP/benchmark_pico_en.rb +2 -0
  10. data/examples/NLP/engtagger.rb +193 -188
  11. data/examples/NLP/nano_eng/nano_en_demo.rb +2 -0
  12. data/examples/NLP/nano_eng/nano_grammar.rb +7 -5
  13. data/examples/NLP/pico_en_demo.rb +2 -0
  14. data/examples/data_formats/JSON/cli_options.rb +3 -1
  15. data/examples/data_formats/JSON/json_ast_builder.rb +14 -9
  16. data/examples/data_formats/JSON/json_ast_nodes.rb +14 -21
  17. data/examples/data_formats/JSON/json_demo.rb +2 -0
  18. data/examples/data_formats/JSON/json_grammar.rb +4 -2
  19. data/examples/data_formats/JSON/json_lexer.rb +10 -8
  20. data/examples/data_formats/JSON/json_minifier.rb +3 -1
  21. data/examples/general/calc_iter1/calc_ast_builder.rb +15 -10
  22. data/examples/general/calc_iter1/calc_ast_nodes.rb +25 -37
  23. data/examples/general/calc_iter1/calc_demo.rb +2 -0
  24. data/examples/general/calc_iter1/calc_grammar.rb +4 -2
  25. data/examples/general/calc_iter1/calc_lexer.rb +8 -4
  26. data/examples/general/calc_iter1/spec/calculator_spec.rb +7 -5
  27. data/examples/general/calc_iter2/calc_ast_builder.rb +7 -3
  28. data/examples/general/calc_iter2/calc_ast_nodes.rb +29 -43
  29. data/examples/general/calc_iter2/calc_demo.rb +2 -0
  30. data/examples/general/calc_iter2/calc_grammar.rb +5 -3
  31. data/examples/general/calc_iter2/calc_lexer.rb +13 -10
  32. data/examples/general/calc_iter2/spec/calculator_spec.rb +28 -26
  33. data/examples/general/left.rb +4 -2
  34. data/examples/general/right.rb +4 -2
  35. data/lib/rley.rb +2 -0
  36. data/lib/rley/base/base_parser.rb +2 -0
  37. data/lib/rley/base/dotted_item.rb +38 -41
  38. data/lib/rley/base/grm_items_builder.rb +2 -0
  39. data/lib/rley/constants.rb +5 -3
  40. data/lib/rley/engine.rb +22 -24
  41. data/lib/rley/formatter/asciitree.rb +6 -4
  42. data/lib/rley/formatter/base_formatter.rb +2 -0
  43. data/lib/rley/formatter/bracket_notation.rb +3 -8
  44. data/lib/rley/formatter/debug.rb +8 -6
  45. data/lib/rley/formatter/json.rb +4 -2
  46. data/lib/rley/gfg/call_edge.rb +3 -1
  47. data/lib/rley/gfg/edge.rb +7 -5
  48. data/lib/rley/gfg/end_vertex.rb +4 -6
  49. data/lib/rley/gfg/epsilon_edge.rb +3 -5
  50. data/lib/rley/gfg/grm_flow_graph.rb +31 -25
  51. data/lib/rley/gfg/item_vertex.rb +12 -22
  52. data/lib/rley/gfg/non_terminal_vertex.rb +6 -4
  53. data/lib/rley/gfg/return_edge.rb +2 -0
  54. data/lib/rley/gfg/scan_edge.rb +3 -1
  55. data/lib/rley/gfg/shortcut_edge.rb +4 -2
  56. data/lib/rley/gfg/start_vertex.rb +6 -8
  57. data/lib/rley/gfg/vertex.rb +47 -41
  58. data/lib/rley/lexical/token.rb +3 -1
  59. data/lib/rley/lexical/token_range.rb +8 -6
  60. data/lib/rley/parse_forest_visitor.rb +7 -5
  61. data/lib/rley/parse_rep/ast_base_builder.rb +11 -11
  62. data/lib/rley/parse_rep/cst_builder.rb +7 -4
  63. data/lib/rley/parse_rep/parse_forest_builder.rb +36 -25
  64. data/lib/rley/parse_rep/parse_forest_factory.rb +5 -3
  65. data/lib/rley/parse_rep/parse_rep_creator.rb +18 -13
  66. data/lib/rley/parse_rep/parse_tree_builder.rb +15 -15
  67. data/lib/rley/parse_rep/parse_tree_factory.rb +27 -25
  68. data/lib/rley/parse_tree_visitor.rb +3 -1
  69. data/lib/rley/parser/error_reason.rb +9 -8
  70. data/lib/rley/parser/gfg_chart.rb +54 -22
  71. data/lib/rley/parser/gfg_earley_parser.rb +3 -1
  72. data/lib/rley/parser/gfg_parsing.rb +51 -31
  73. data/lib/rley/parser/parse_entry.rb +29 -33
  74. data/lib/rley/parser/parse_entry_set.rb +32 -27
  75. data/lib/rley/parser/parse_entry_tracker.rb +6 -4
  76. data/lib/rley/parser/parse_state.rb +18 -21
  77. data/lib/rley/parser/parse_state_tracker.rb +6 -4
  78. data/lib/rley/parser/parse_tracer.rb +15 -13
  79. data/lib/rley/parser/parse_walker_factory.rb +28 -29
  80. data/lib/rley/parser/state_set.rb +11 -10
  81. data/lib/rley/ptree/non_terminal_node.rb +10 -6
  82. data/lib/rley/ptree/parse_tree.rb +6 -4
  83. data/lib/rley/ptree/parse_tree_node.rb +7 -5
  84. data/lib/rley/ptree/terminal_node.rb +9 -7
  85. data/lib/rley/rley_error.rb +12 -10
  86. data/lib/rley/sppf/alternative_node.rb +8 -6
  87. data/lib/rley/sppf/composite_node.rb +9 -7
  88. data/lib/rley/sppf/epsilon_node.rb +5 -3
  89. data/lib/rley/sppf/leaf_node.rb +5 -3
  90. data/lib/rley/sppf/non_terminal_node.rb +2 -0
  91. data/lib/rley/sppf/parse_forest.rb +19 -17
  92. data/lib/rley/sppf/sppf_node.rb +9 -8
  93. data/lib/rley/sppf/token_node.rb +5 -3
  94. data/lib/rley/syntax/grammar.rb +7 -5
  95. data/lib/rley/syntax/grammar_builder.rb +11 -9
  96. data/lib/rley/syntax/grm_symbol.rb +8 -6
  97. data/lib/rley/syntax/literal.rb +2 -0
  98. data/lib/rley/syntax/non_terminal.rb +11 -15
  99. data/lib/rley/syntax/production.rb +13 -11
  100. data/lib/rley/syntax/symbol_seq.rb +10 -10
  101. data/lib/rley/syntax/terminal.rb +6 -5
  102. data/lib/rley/syntax/verbatim_symbol.rb +5 -3
  103. data/lib/support/base_tokenizer.rb +23 -20
  104. data/spec/rley/base/dotted_item_spec.rb +4 -2
  105. data/spec/rley/base/grm_items_builder_spec.rb +2 -0
  106. data/spec/rley/engine_spec.rb +47 -9
  107. data/spec/rley/formatter/asciitree_spec.rb +11 -9
  108. data/spec/rley/formatter/bracket_notation_spec.rb +16 -14
  109. data/spec/rley/formatter/debug_spec.rb +4 -2
  110. data/spec/rley/formatter/json_spec.rb +5 -3
  111. data/spec/rley/gfg/call_edge_spec.rb +2 -0
  112. data/spec/rley/gfg/edge_spec.rb +2 -0
  113. data/spec/rley/gfg/end_vertex_spec.rb +7 -5
  114. data/spec/rley/gfg/epsilon_edge_spec.rb +2 -0
  115. data/spec/rley/gfg/grm_flow_graph_spec.rb +2 -0
  116. data/spec/rley/gfg/item_vertex_spec.rb +12 -10
  117. data/spec/rley/gfg/non_terminal_vertex_spec.rb +5 -3
  118. data/spec/rley/gfg/return_edge_spec.rb +2 -0
  119. data/spec/rley/gfg/scan_edge_spec.rb +2 -0
  120. data/spec/rley/gfg/shortcut_edge_spec.rb +3 -1
  121. data/spec/rley/gfg/start_vertex_spec.rb +7 -5
  122. data/spec/rley/gfg/vertex_spec.rb +5 -3
  123. data/spec/rley/lexical/token_range_spec.rb +18 -16
  124. data/spec/rley/lexical/token_spec.rb +4 -2
  125. data/spec/rley/parse_forest_visitor_spec.rb +167 -163
  126. data/spec/rley/parse_rep/ambiguous_parse_spec.rb +46 -44
  127. data/spec/rley/parse_rep/ast_builder_spec.rb +8 -6
  128. data/spec/rley/parse_rep/cst_builder_spec.rb +7 -5
  129. data/spec/rley/parse_rep/groucho_spec.rb +25 -25
  130. data/spec/rley/parse_rep/parse_forest_builder_spec.rb +28 -26
  131. data/spec/rley/parse_rep/parse_forest_factory_spec.rb +8 -6
  132. data/spec/rley/parse_rep/parse_tree_factory_spec.rb +4 -2
  133. data/spec/rley/parse_tree_visitor_spec.rb +12 -8
  134. data/spec/rley/parser/error_reason_spec.rb +8 -6
  135. data/spec/rley/parser/gfg_chart_spec.rb +17 -4
  136. data/spec/rley/parser/gfg_earley_parser_spec.rb +16 -11
  137. data/spec/rley/parser/gfg_parsing_spec.rb +41 -252
  138. data/spec/rley/parser/parse_entry_set_spec.rb +2 -0
  139. data/spec/rley/parser/parse_entry_spec.rb +21 -19
  140. data/spec/rley/parser/parse_state_spec.rb +7 -5
  141. data/spec/rley/parser/parse_tracer_spec.rb +16 -14
  142. data/spec/rley/parser/parse_walker_factory_spec.rb +10 -8
  143. data/spec/rley/parser/state_set_spec.rb +24 -22
  144. data/spec/rley/ptree/non_terminal_node_spec.rb +7 -3
  145. data/spec/rley/ptree/parse_tree_node_spec.rb +6 -4
  146. data/spec/rley/ptree/parse_tree_spec.rb +2 -0
  147. data/spec/rley/ptree/terminal_node_spec.rb +8 -6
  148. data/spec/rley/sppf/alternative_node_spec.rb +8 -6
  149. data/spec/rley/sppf/non_terminal_node_spec.rb +5 -3
  150. data/spec/rley/sppf/token_node_spec.rb +6 -4
  151. data/spec/rley/support/ambiguous_grammar_helper.rb +5 -4
  152. data/spec/rley/support/expectation_helper.rb +2 -0
  153. data/spec/rley/support/grammar_abc_helper.rb +4 -4
  154. data/spec/rley/support/grammar_ambig01_helper.rb +6 -5
  155. data/spec/rley/support/grammar_arr_int_helper.rb +6 -5
  156. data/spec/rley/support/grammar_b_expr_helper.rb +6 -5
  157. data/spec/rley/support/grammar_helper.rb +2 -0
  158. data/spec/rley/support/grammar_l0_helper.rb +15 -16
  159. data/spec/rley/support/grammar_pb_helper.rb +8 -5
  160. data/spec/rley/support/grammar_sppf_helper.rb +3 -1
  161. data/spec/rley/syntax/grammar_builder_spec.rb +7 -5
  162. data/spec/rley/syntax/grammar_spec.rb +8 -6
  163. data/spec/rley/syntax/grm_symbol_spec.rb +3 -1
  164. data/spec/rley/syntax/literal_spec.rb +2 -0
  165. data/spec/rley/syntax/non_terminal_spec.rb +10 -8
  166. data/spec/rley/syntax/production_spec.rb +15 -13
  167. data/spec/rley/syntax/symbol_seq_spec.rb +4 -2
  168. data/spec/rley/syntax/terminal_spec.rb +7 -5
  169. data/spec/rley/syntax/verbatim_symbol_spec.rb +3 -1
  170. data/spec/spec_helper.rb +2 -12
  171. data/spec/support/base_tokenizer_spec.rb +9 -2
  172. metadata +21 -63
  173. data/.simplecov +0 -7
  174. data/Gemfile +0 -8
The hunks below come from the ptree and sppf spec files and the spec support helpers. They are mostly mechanical clean-ups: each file gains a # frozen_string_literal: true magic comment, trailing whitespace is stripped (which is why several - / + line pairs look identical), and a few Rubocop directives and small style fixes are applied.

data/spec/rley/ptree/non_terminal_node_spec.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require 'ostruct'
  require_relative '../../spec_helper'

@@ -31,7 +33,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  terminal = OpenStruct.new(name: aSymbolName)
  return OpenStruct.new(lexeme: aLexeme, terminal: terminal)
  end
-
+
  it 'should accept the addition of subnodes' do
  child1 = double('first_child')
  child2 = double('second_child')
@@ -42,6 +44,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  expect(subject.subnodes).to eq([child3, child2, child1])
  end

+ # rubocop: disable Naming/VariableNumber
  it 'should provide a text representation of itself' do
  # Case 1: no child
  expected_text = 'VP[0, 3]'
@@ -59,9 +62,9 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  noun = build_token('bus', 'Noun')
  child_3_1 = TerminalNode.new(noun, range(2, 3))
  # We reverse the sequence of subnode addition
- subject.add_subnode(child_1_2)
+ subject.add_subnode(child_1_2)
  subject.add_subnode(child_1_1)
- child_1_2.add_subnode(child_2_2)
+ child_1_2.add_subnode(child_2_2)
  child_1_2.add_subnode(child_2_1)
  child_2_2.add_subnode(child_3_1)
  expected_text = <<-SNIPPET
@@ -74,6 +77,7 @@ VP[0, 3]
  SNIPPET
  expect(subject.to_string(0)).to eq(expected_text.chomp)
  end
+ # rubocop: enable Naming/VariableNumber
  end # context
  end # describe
  end # module
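
Nearly every file in this release gains the # frozen_string_literal: true magic comment shown in the hunk above. As a quick illustration (not code from the gem), the pragma makes every string literal in the file immutable, so accidental in-place mutation is caught early:

# frozen_string_literal: true

label = 'VP'

begin
  label << '[0, 3]' # in-place mutation of a frozen literal
rescue FrozenError => e # RuntimeError on Rubies older than 2.5
  puts "mutation rejected: #{e.message}"
end

puts label + '[0, 3]' # building a new string still works => "VP[0, 3]"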

data/spec/rley/ptree/parse_tree_node_spec.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require_relative '../../spec_helper'

  # Load the class under test
@@ -25,17 +27,17 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  expect(subject.range).to eq(sample_range)
  end
  end # context
-
+
  context 'Initialization:' do
  it 'should assign undefined range bounds' do
  partial_range = { low: 0 } # High bound left undefined
  instance = ParseTreeNode.new(sample_symbol, partial_range)
-
+
  another = { low: 1, high: 4 } # High bound is specified
  instance.range = another
  expect(instance.range).to eq(low: 0, high: 4)
- end
- end # context
+ end
+ end # context
  end # describe
  end # module
  end # module

data/spec/rley/ptree/parse_tree_spec.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require_relative '../../spec_helper'

  require_relative '../support/grammar_abc_helper'

data/spec/rley/ptree/terminal_node_spec.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require 'ostruct'
  require_relative '../../spec_helper'

@@ -8,23 +10,23 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  module PTree # Open this namespace to avoid module qualifier prefixes
  describe TerminalNode do
  let(:sample_symbol) { OpenStruct.new(name: 'Noun') }
- let(:sample_token) do
- OpenStruct.new(lexeme: 'world', terminal: sample_symbol)
+ let(:sample_token) do
+ OpenStruct.new(lexeme: 'world', terminal: sample_symbol)
  end
  let(:sample_range) { double('fake-range') }
-
+
  subject { TerminalNode.new(sample_token, sample_range) }
-
+
  context 'Initialization:' do
  it 'should be bound to a token' do
  expect(subject.token).to eq(sample_token)
  end
  end # context
-
+
  context 'Provided services:' do
  it 'should provide a text representation of itself' do
  expected_text = "Noun[?, ?]: 'world'"
- expect(subject.to_string(0)).to eq(expected_text)
+ expect(subject.to_string(0)).to eq(expected_text)
  end
  end # context
  end # describe

data/spec/rley/sppf/alternative_node_spec.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require 'ostruct'
  require_relative '../../spec_helper'

@@ -17,7 +19,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  def range(low, high)
  return Lexical::TokenRange.new(low: low, high: high)
  end
-
+
  let(:t_a) { Syntax::Terminal.new('A') }
  let(:t_b) { Syntax::Terminal.new('B') }
  let(:t_c) { Syntax::Terminal.new('C') }
@@ -27,7 +29,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  end
  let(:sample_item) { Base::DottedItem.new(sample_prod, 3) }
  let(:sample_vertex) { GFG::ItemVertex.new(sample_item) }
- let(:sample_range) { range(0, 3) }
+ let(:sample_range) { range(0, 3) }

  subject { AlternativeNode.new(sample_vertex, sample_range) }

@@ -35,9 +37,9 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  it 'should be created with a item vertex and a token range' do
  expect { AlternativeNode.new(sample_vertex, sample_range) }
  .not_to raise_error
- end
+ end
  end
-
+
  context 'Initialization:' do
  it 'should know its token range' do
  expect(subject.range).to eq(sample_range)
@@ -48,7 +50,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  expect(subject.subnodes).to be_empty
  end
  end # context
-
+
  context 'Provided services:' do
  it 'should accept the addition of subnodes' do
  subnode1 = double('first_subnode')
@@ -61,7 +63,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  end


- it 'should have a string representation' do
+ it 'should have a string representation' do
  expect(subject.to_string(0)).to eq('Alt(sentence => A B C .)[0, 3]')
  end
  end # context

data/spec/rley/sppf/non_terminal_node_spec.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require 'ostruct'
  require_relative '../../spec_helper'

@@ -35,12 +37,12 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  it "shouldn't have children yet" do
  expect(subject.subnodes).to be_empty
  end
-
+
  it 'should have :and refinement' do
  expect(subject.refinement).to eq(:and)
  end
  end # context
-
+
  context 'Provided services:' do
  it 'should accept the addition of subnodes' do
  subnode1 = double('first_subnode')
@@ -55,7 +57,7 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  it 'should have a string representation' do
  expect(subject.to_string(0)).to eq('VP[0, 3]')
  end
-
+
  it 'should return a key value of itself' do
  expect(subject.key).to eq('VP[0, 3]')
  end

data/spec/rley/sppf/token_node_spec.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require 'ostruct'
  require_relative '../../spec_helper'

@@ -13,8 +15,8 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  describe TokenNode do
  let(:sample_symbol) { Syntax::Terminal.new('Noun') }
  let(:sample_position) { Lexical::Position.new(3, 4) }
- let(:sample_token) do
- Lexical::Token.new('language', sample_symbol, sample_position)
+ let(:sample_token) do
+ Lexical::Token.new('language', sample_symbol, sample_position)
  end
  let(:sample_rank) { 3 }

@@ -31,13 +33,13 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  expect(subject.range.high).to eq(sample_rank + 1)
  end
  end # context
-
+
  context 'Provided services:' do
  it 'should know its string representation' do
  expect(subject.to_string(0)).to eq('Noun[3, 4]')
  expect(subject.inspect).to eq('Noun[3, 4]')
  end
-
+
  it 'should return a key value of itself' do
  expect(subject.key).to eq('Noun[3, 4]')
  end

data/spec/rley/support/ambiguous_grammar_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/syntax/grammar_builder'
  require_relative '../../../lib/rley/lexical/token'
@@ -7,14 +9,13 @@ module AmbiguousGrammarHelper
  # Factory method. Creates a grammar builder for a basic ambiguous
  # expression grammar.
  # (based on an example from Fisher and LeBlanc: "Crafting a Compiler")
- def grammar_builder()
- builder = Rley::Syntax::GrammarBuilder.new do
+ def grammar_builder
+ Rley::Syntax::GrammarBuilder.new do
  add_terminals('+', 'id')
  rule 'S' => 'E'
- rule 'E' => %w[E + E]
+ rule 'E' => 'E + E'
  rule 'E' => 'id'
  end
- builder
  end

  # Basic tokenizing method
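
A pattern that repeats in this and the following helper diffs: grammar rules whose right-hand side was written as a %w[...] word array are rewritten as a single space-separated string, and the factory methods now return the GrammarBuilder instance directly instead of assigning it to a local first. A minimal usage sketch mirroring the helper above (assuming require 'rley' makes the same GrammarBuilder class available):

require 'rley'

# Same grammar as AmbiguousGrammarHelper#grammar_builder, written with the
# space-separated string form of the right-hand side.
builder = Rley::Syntax::GrammarBuilder.new do
  add_terminals('+', 'id')
  rule 'S' => 'E'
  rule 'E' => 'E + E' # was: rule 'E' => %w[E + E]
  rule 'E' => 'id'
end

grammar = builder.grammar # materialize the Grammar object, as GrammarPBHelper does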

data/spec/rley/support/expectation_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/lexical/token'

data/spec/rley/support/grammar_abc_helper.rb

@@ -1,18 +1,18 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/syntax/grammar_builder'

  module GrammarABCHelper
  # Factory method. Creates a grammar builder for a simple grammar.
  # (based on example in N. Wirth "Compiler Construction" book, p. 6)
- def grammar_abc_builder()
- builder = Rley::Syntax::GrammarBuilder.new do
+ def grammar_abc_builder
+ Rley::Syntax::GrammarBuilder.new do
  add_terminals('a', 'b', 'c')
  rule 'S' => 'A'
  rule 'A' => 'a A c'
  rule 'A' => 'b'
  end
-
- return builder
  end
  end # module
  # End of file

data/spec/rley/support/grammar_ambig01_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/syntax/grammar_builder'
  require_relative '../../../lib/rley/lexical/token'
@@ -8,16 +10,15 @@ module GrammarAmbig01Helper
  # Factory method. Define a grammar for a very simple language
  # Grammar 3: An ambiguous arithmetic expression language
  # (based on example in article on Earley's algorithm in Wikipedia)
- def grammar_ambig01_builder()
- builder = Rley::Syntax::GrammarBuilder.new do
+ def grammar_ambig01_builder
+ Rley::Syntax::GrammarBuilder.new do
  add_terminals('integer', '+', '*')
  rule 'P' => 'S'
- rule 'S' => %w[S + S]
- rule 'S' => %w[S * S]
+ rule 'S' => 'S + S'
+ rule 'S' => 'S * S'
  rule 'S' => 'L'
  rule 'L' => 'integer'
  end
- builder
  end

  # Highly simplified tokenizer implementation.

data/spec/rley/support/grammar_arr_int_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require 'strscan'

  # Load the builder class
@@ -9,17 +11,16 @@ module GrammarArrIntHelper
  # Factory method. Creates a grammar builder for a grammar of
  # array of integers.
  # (based on the article about Earley's algorithm in Wikipedia)
- def grammar_arr_int_builder()
- builder = Rley::Syntax::GrammarBuilder.new do
+ def grammar_arr_int_builder
+ Rley::Syntax::GrammarBuilder.new do
  add_terminals('[', ']', ',', 'integer')
  rule 'P' => 'arr'
- rule 'arr' => %w([ sequence ])
+ rule 'arr' => '[ sequence ]'
  rule 'sequence' => 'list'
  rule 'sequence' => []
- rule 'list' => %w[list , integer] # Left-recursive rule
+ rule 'list' => 'list , integer' # Left-recursive rule
  rule 'list' => 'integer'
  end
- builder
  end

  # Basic tokenizer for array of integers

data/spec/rley/support/grammar_b_expr_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/syntax/grammar_builder'
  require_relative '../../../lib/rley/lexical/token'
@@ -7,17 +9,16 @@ module GrammarBExprHelper
  # Factory method. Creates a grammar builder for a basic arithmetic
  # expression grammar.
  # (based on the article about Earley's algorithm in Wikipedia)
- def grammar_expr_builder()
- builder = Rley::Syntax::GrammarBuilder.new do
+ def grammar_expr_builder
+ Rley::Syntax::GrammarBuilder.new do
  add_terminals('+', '*', 'integer')
  rule 'P' => 'S'
- rule 'S' => %w[S + M]
+ rule 'S' => 'S + M'
  rule 'S' => 'M'
- rule 'M' => %w[M * T]
+ rule 'M' => 'M * T'
  rule 'M' => 'T'
  rule 'T' => 'integer'
  end
- builder
  end

  # Basic expression tokenizer

data/spec/rley/support/grammar_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/lexical/token'

data/spec/rley/support/grammar_l0_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/syntax/grammar_builder'
  require_relative '../../../lib/rley/lexical/token'
@@ -9,29 +11,28 @@ module GrammarL0Helper
  # based on Jurafky & Martin L0 language (chapter 12 of the book).
  # It defines the syntax of a sentence in a language with a
  # very limited syntax and lexicon in the context of airline reservation.
- def grammar_l0_builder()
- builder = Rley::Syntax::GrammarBuilder.new do
+ def grammar_l0_builder
+ Rley::Syntax::GrammarBuilder.new do
  add_terminals('Noun', 'Verb', 'Pronoun', 'Proper-Noun')
  add_terminals('Determiner', 'Preposition')
- rule 'S' => %w[NP VP]
+ rule 'S' => 'NP VP'
  rule 'NP' => 'Pronoun'
  rule 'NP' => 'Proper-Noun'
- rule 'NP' => %w[Determiner Nominal]
- rule 'Nominal' => %w[Nominal Noun]
+ rule 'NP' => 'Determiner Nominal'
+ rule 'Nominal' => 'Nominal Noun'
  rule 'Nominal' => 'Noun'
  rule 'VP' => 'Verb'
- rule 'VP' => %w[Verb NP]
- rule 'VP' => %w[Verb NP PP]
- rule 'VP' => %w[Verb PP]
- rule 'PP' => %w[Preposition PP]
+ rule 'VP' => 'Verb NP'
+ rule 'VP' => 'Verb NP PP'
+ rule 'VP' => 'Verb PP'
+ rule 'PP' => 'Preposition PP'
  end
- builder
  end

  # Return the language lexicon.
  # A lexicon is just a Hash with pairs of the form:
  # word => terminal symbol name
- def lexicon_l0()
+ def lexicon_l0
  return {
  'flight' => 'Noun',
  'breeze' => 'Noun',
@@ -76,13 +77,11 @@ module GrammarL0Helper
  word = scanner.scan(/\S+/)
  break unless word

- term_name = lexicon_l0[word]
- if term_name.nil?
- raise StandardError, "Word '#{word}' not found in lexicon"
- end
+ term = lexicon_l0[word]
+ raise StandardError, "Word '#{word}' not found in lexicon" if term.nil?

  pos = Rley::Lexical::Position.new(1, curr_pos + 1)
- tokens << Rley::Lexical::Token.new(word, term_name, pos)
+ tokens << Rley::Lexical::Token.new(word, term, pos)


  return tokens

data/spec/rley/support/grammar_pb_helper.rb

@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Load the builder class
  require_relative '../../../lib/rley/syntax/grammar_builder'
  require_relative '../../../lib/support/base_tokenizer'
@@ -10,7 +12,7 @@ class GrammarPBHelper
  # expression based on example found in paper of
  # K. Pingali and G. Bilardi:
  # "A Graphical Model for Context-Free Grammar Parsing"
- def grammar()
+ def grammar
  @grammar ||= begin
  builder = Rley::Syntax::GrammarBuilder.new do
  add_terminals('int', '+', '(', ')')
@@ -22,12 +24,13 @@ class GrammarPBHelper
  builder.grammar
  end
  end
-
+
  class PB_Tokenizer < BaseTokenizer
  protected

- def recognize_token()
- if (lexeme = scanner.scan(/[\(\)]/)) # Single characters
+ # rubocop: disable Lint/DuplicateBranch
+ def recognize_token
+ if (lexeme = scanner.scan(/[()]/)) # Single characters
  # Delimiters, separators => single character token
  build_token(lexeme, lexeme)
  elsif (lexeme = scanner.scan(/(?:\+)(?=\s|$)/)) # Isolated char
@@ -36,6 +39,7 @@ class GrammarPBHelper
  build_token('int', lexeme)
  end
  end
+ # rubocop: enable Lint/DuplicateBranch
  end # class

  # Basic tokenizer
@@ -44,6 +48,5 @@ class GrammarPBHelper
  tokenizer = PB_Tokenizer.new(aText)
  tokenizer.tokens
  end
-
  end # class
  # End of file
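
The recognize_token change above is the usual StringScanner idiom: try each token pattern in turn and emit the first match. A self-contained sketch of that idiom for the same mini-language (plain StringScanner with a hypothetical next_token helper, not Rley's BaseTokenizer API):

require 'strscan'

# Illustrative helper: return the next [token_kind, lexeme] pair,
# or nil once the input is exhausted.
def next_token(scanner)
  scanner.skip(/\s+/)                         # ignore whitespace
  return nil if scanner.eos?

  if (lexeme = scanner.scan(/[()]/))          # single-character delimiters
    [lexeme, lexeme]
  elsif (lexeme = scanner.scan(/\+(?=\s|$)/)) # isolated '+' operator
    [lexeme, lexeme]
  elsif (lexeme = scanner.scan(/\d+/))        # integer literal
    ['int', lexeme]
  else
    raise StandardError, "Unexpected input at #{scanner.rest.inspect}"
  end
end

scanner = StringScanner.new('2 + (3 + 4)')
tokens = []
while (token = next_token(scanner))
  tokens << token
end
# tokens => [["int", "2"], ["+", "+"], ["(", "("], ["int", "3"],
#            ["+", "+"], ["int", "4"], [")", ")"]]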