simply_stored 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. data/lib/simply_stored/class_methods_base.rb +31 -0
  2. data/lib/simply_stored/couch/belongs_to.rb +117 -0
  3. data/lib/simply_stored/couch/ext/couch_potato.rb +16 -0
  4. data/lib/simply_stored/couch/has_many.rb +148 -0
  5. data/lib/simply_stored/couch/has_one.rb +93 -0
  6. data/lib/simply_stored/couch/validations.rb +74 -0
  7. data/lib/simply_stored/couch/views/array_property_view_spec.rb +22 -0
  8. data/lib/simply_stored/couch/views.rb +1 -0
  9. data/lib/simply_stored/couch.rb +278 -0
  10. data/lib/simply_stored/instance_methods.rb +143 -0
  11. data/lib/simply_stored/simpledb/associations.rb +196 -0
  12. data/lib/simply_stored/simpledb/attributes.rb +173 -0
  13. data/lib/simply_stored/simpledb/storag.rb +85 -0
  14. data/lib/simply_stored/simpledb/validations.rb +88 -0
  15. data/lib/simply_stored/simpledb.rb +212 -0
  16. data/lib/simply_stored/storage.rb +93 -0
  17. data/lib/simply_stored.rb +9 -0
  18. data/test/custom_views_test.rb +33 -0
  19. data/test/fixtures/couch.rb +182 -0
  20. data/test/fixtures/simpledb/item.rb +11 -0
  21. data/test/fixtures/simpledb/item_daddy.rb +8 -0
  22. data/test/fixtures/simpledb/log_item.rb +3 -0
  23. data/test/fixtures/simpledb/namespace_bar.rb +5 -0
  24. data/test/fixtures/simpledb/namespace_foo.rb +7 -0
  25. data/test/fixtures/simpledb/protected_item.rb +3 -0
  26. data/test/simply_stored_couch_test.rb +1684 -0
  27. data/test/simply_stored_simpledb_test.rb +1341 -0
  28. data/test/test_helper.rb +22 -0
  29. data/test/vendor/dhaka-2.2.1/lib/dhaka/dot/dot.rb +29 -0
  30. data/test/vendor/dhaka-2.2.1/lib/dhaka/evaluator/evaluator.rb +133 -0
  31. data/test/vendor/dhaka-2.2.1/lib/dhaka/grammar/closure_hash.rb +15 -0
  32. data/test/vendor/dhaka-2.2.1/lib/dhaka/grammar/grammar.rb +240 -0
  33. data/test/vendor/dhaka-2.2.1/lib/dhaka/grammar/grammar_symbol.rb +27 -0
  34. data/test/vendor/dhaka-2.2.1/lib/dhaka/grammar/precedence.rb +19 -0
  35. data/test/vendor/dhaka-2.2.1/lib/dhaka/grammar/production.rb +36 -0
  36. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/accept_actions.rb +36 -0
  37. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/alphabet.rb +21 -0
  38. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/compiled_lexer.rb +46 -0
  39. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/dfa.rb +121 -0
  40. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/lexeme.rb +32 -0
  41. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/lexer.rb +70 -0
  42. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/lexer_run.rb +78 -0
  43. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/regex_grammar.rb +392 -0
  44. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/regex_parser.rb +2010 -0
  45. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/regex_tokenizer.rb +14 -0
  46. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/specification.rb +96 -0
  47. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/state.rb +68 -0
  48. data/test/vendor/dhaka-2.2.1/lib/dhaka/lexer/state_machine.rb +37 -0
  49. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/action.rb +55 -0
  50. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/channel.rb +58 -0
  51. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/compiled_parser.rb +51 -0
  52. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/conflict.rb +54 -0
  53. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/item.rb +42 -0
  54. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/parse_result.rb +50 -0
  55. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/parse_tree.rb +66 -0
  56. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/parser.rb +165 -0
  57. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/parser_methods.rb +11 -0
  58. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/parser_run.rb +39 -0
  59. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/parser_state.rb +74 -0
  60. data/test/vendor/dhaka-2.2.1/lib/dhaka/parser/token.rb +22 -0
  61. data/test/vendor/dhaka-2.2.1/lib/dhaka/runtime.rb +51 -0
  62. data/test/vendor/dhaka-2.2.1/lib/dhaka/tokenizer/tokenizer.rb +190 -0
  63. data/test/vendor/dhaka-2.2.1/lib/dhaka.rb +62 -0
  64. data/test/vendor/dhaka-2.2.1/test/all_tests.rb +5 -0
  65. data/test/vendor/dhaka-2.2.1/test/arithmetic/arithmetic_evaluator.rb +64 -0
  66. data/test/vendor/dhaka-2.2.1/test/arithmetic/arithmetic_evaluator_test.rb +43 -0
  67. data/test/vendor/dhaka-2.2.1/test/arithmetic/arithmetic_grammar.rb +41 -0
  68. data/test/vendor/dhaka-2.2.1/test/arithmetic/arithmetic_grammar_test.rb +9 -0
  69. data/test/vendor/dhaka-2.2.1/test/arithmetic/arithmetic_test_methods.rb +9 -0
  70. data/test/vendor/dhaka-2.2.1/test/arithmetic/arithmetic_tokenizer.rb +39 -0
  71. data/test/vendor/dhaka-2.2.1/test/arithmetic/arithmetic_tokenizer_test.rb +38 -0
  72. data/test/vendor/dhaka-2.2.1/test/arithmetic_precedence/arithmetic_precedence_evaluator.rb +43 -0
  73. data/test/vendor/dhaka-2.2.1/test/arithmetic_precedence/arithmetic_precedence_grammar.rb +24 -0
  74. data/test/vendor/dhaka-2.2.1/test/arithmetic_precedence/arithmetic_precedence_grammar_test.rb +30 -0
  75. data/test/vendor/dhaka-2.2.1/test/arithmetic_precedence/arithmetic_precedence_lexer_specification.rb +23 -0
  76. data/test/vendor/dhaka-2.2.1/test/arithmetic_precedence/arithmetic_precedence_parser_test.rb +33 -0
  77. data/test/vendor/dhaka-2.2.1/test/brackets/bracket_grammar.rb +23 -0
  78. data/test/vendor/dhaka-2.2.1/test/brackets/bracket_tokenizer.rb +22 -0
  79. data/test/vendor/dhaka-2.2.1/test/brackets/brackets_test.rb +28 -0
  80. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_driver.rb +46 -0
  81. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_driver_test.rb +276 -0
  82. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_evaluator.rb +284 -0
  83. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_evaluator_test.rb +38 -0
  84. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_grammar.rb +104 -0
  85. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_lexer.rb +109 -0
  86. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_lexer_specification.rb +37 -0
  87. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_lexer_test.rb +58 -0
  88. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_parser.rb +879 -0
  89. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_parser_test.rb +55 -0
  90. data/test/vendor/dhaka-2.2.1/test/chittagong/chittagong_test.rb +170 -0
  91. data/test/vendor/dhaka-2.2.1/test/core/another_lalr_but_not_slr_grammar.rb +20 -0
  92. data/test/vendor/dhaka-2.2.1/test/core/compiled_parser_test.rb +44 -0
  93. data/test/vendor/dhaka-2.2.1/test/core/dfa_test.rb +170 -0
  94. data/test/vendor/dhaka-2.2.1/test/core/evaluator_test.rb +22 -0
  95. data/test/vendor/dhaka-2.2.1/test/core/grammar_test.rb +83 -0
  96. data/test/vendor/dhaka-2.2.1/test/core/lalr_but_not_slr_grammar.rb +19 -0
  97. data/test/vendor/dhaka-2.2.1/test/core/lexer_test.rb +139 -0
  98. data/test/vendor/dhaka-2.2.1/test/core/malformed_grammar.rb +7 -0
  99. data/test/vendor/dhaka-2.2.1/test/core/malformed_grammar_test.rb +8 -0
  100. data/test/vendor/dhaka-2.2.1/test/core/nullable_grammar.rb +21 -0
  101. data/test/vendor/dhaka-2.2.1/test/core/parse_result_test.rb +44 -0
  102. data/test/vendor/dhaka-2.2.1/test/core/parser_state_test.rb +24 -0
  103. data/test/vendor/dhaka-2.2.1/test/core/parser_test.rb +131 -0
  104. data/test/vendor/dhaka-2.2.1/test/core/precedence_grammar.rb +17 -0
  105. data/test/vendor/dhaka-2.2.1/test/core/precedence_grammar_test.rb +9 -0
  106. data/test/vendor/dhaka-2.2.1/test/core/rr_conflict_grammar.rb +21 -0
  107. data/test/vendor/dhaka-2.2.1/test/core/simple_grammar.rb +22 -0
  108. data/test/vendor/dhaka-2.2.1/test/core/sr_conflict_grammar.rb +16 -0
  109. data/test/vendor/dhaka-2.2.1/test/dhaka_test_helper.rb +17 -0
  110. data/test/vendor/dhaka-2.2.1/test/fake_logger.rb +17 -0
  111. data/test/vendor/simplerdb-0.2/lib/simplerdb/client_exception.rb +10 -0
  112. data/test/vendor/simplerdb-0.2/lib/simplerdb/db.rb +146 -0
  113. data/test/vendor/simplerdb-0.2/lib/simplerdb/query_language.rb +266 -0
  114. data/test/vendor/simplerdb-0.2/lib/simplerdb/server.rb +33 -0
  115. data/test/vendor/simplerdb-0.2/lib/simplerdb/servlet.rb +191 -0
  116. data/test/vendor/simplerdb-0.2/lib/simplerdb.rb +3 -0
  117. data/test/vendor/simplerdb-0.2/test/functional_test.rb +81 -0
  118. data/test/vendor/simplerdb-0.2/test/query_evaluator_test.rb +73 -0
  119. data/test/vendor/simplerdb-0.2/test/query_parser_test.rb +64 -0
  120. data/test/vendor/simplerdb-0.2/test/simplerdb_test.rb +80 -0
  121. metadata +182 -0
@@ -0,0 +1,14 @@
module Dhaka
  module LexerSupport
    # Tokenizer used by Dhaka's own regular-expression parser: every
    # character in the supported alphabet is emitted as its own
    # single-character token.
    class RegexTokenizer < Tokenizer

      # A single idle state suffices — each input character becomes a token
      # whose symbol is the character itself, with no attached value.
      for_state TOKENIZER_IDLE_STATE do
        for_characters Dhaka::LexerSupport::ALL_CHARACTERS do
          create_token curr_char, nil
          advance
        end
      end

    end
  end
end
@@ -0,0 +1,96 @@
module Dhaka
  # Abstract base class for lexer specifications.
  #
  # Subclasses declare, via +for_pattern+, the action to run when the lexer
  # recognizes a given pattern. Actions are listed in descending order of
  # priority, so in:
  #
  #   class LexerSpec < Dhaka::LexerSpecification
  #     for_pattern 'zz' do
  #       "recognized two zs"
  #     end
  #
  #     for_pattern '\w(\w|\d)*' do
  #       "recognized word token #{current_lexeme.value}"
  #     end
  #
  #     for_pattern '(\d)+(\.\d+)?' do
  #       "recognized number #{current_lexeme.value}"
  #     end
  #
  #     for_pattern ' +' do
  #       #ignores whitespace
  #     end
  #
  #     for_pattern "\n+" do
  #       "recognized newline"
  #     end
  #   end
  #
  # the pattern 'zz' takes precedence over the word pattern below it, so the
  # lexer announces two 'z's rather than a word token.
  #
  # The patterns are <i>not</i> Ruby regular expressions — many operators of
  # Ruby's regex engine are not supported. See
  # http://dhaka.rubyforge.org/regex_grammar.html for the supported syntax.
  # Patterns may be written as Ruby regex literals or as string literals.
  #
  # Points to keep in mind about the regex implementation:
  # * The greediest matching expression always wins; priorities only break
  #   ties when the same set of characters matches multiple expressions.
  # * All quantifiers are greedy; there are no non-greedy modifiers.
  # * The lookahead operator "/" can behave counter-intuitively when the
  #   pre- and post-lookahead expressions share characters: "(ab)+/abcd"
  #   applied to "abababcd" matches "ababab", not "abab" — the
  #   pre-lookahead expression is greedy.
  # * Only the characters in the grammar above are supported: no extended
  #   ASCII and no unicode.


  class LexerSpecification
    class << self
      # Registers +blk+ as the action performed when the lexer recognizes
      # +pattern+ (a String or a Regexp literal). When Lexer#lex runs, the
      # block is evaluated in the context of a LexerRun, so
      # LexerRun#current_lexeme and LexerRun#create_token are available.
      def for_pattern(pattern, &blk)
        source =
          if pattern.is_a?(Regexp)
            pattern.source
          elsif pattern.is_a?(String)
            pattern
          end
        items[source] = LexerSpecificationItem.new(source, priority, blk)
        self.priority += 1
      end

      # Like +for_pattern+, but escapes regex metacharacters automatically:
      #   for_symbol('+') { ... }
      # is equivalent to:
      #   for_pattern('\+') { ... }
      def for_symbol(symbol, &blk)
        pattern = LexerSupport::OPERATOR_CHARACTERS.include?(symbol) ? "\\#{symbol}" : symbol
        for_pattern(pattern, &blk)
      end

      private
      # Gives every subclass its own pattern table and priority counter.
      def inherited(specification)
        class << specification
          attr_accessor :items, :priority
        end
        specification.items = {}
        specification.priority = 0
      end

    end
  end

  # A single pattern/priority/action entry of a lexer specification.
  # Ordered by declaration priority.
  class LexerSpecificationItem #:nodoc:
    include Comparable
    attr_reader :pattern, :action, :priority

    def initialize(pattern, priority, action)
      @pattern = pattern
      @priority = priority
      @action = action
    end

    def <=>(other)
      priority <=> other.priority
    end
  end
end
@@ -0,0 +1,68 @@
module Dhaka
  module LexerSupport

    # A single state of a lexer state machine. Holds outgoing character
    # transitions, checkpoint actions (run whenever the state is processed)
    # and an optional accept action marking the state as accepting.
    class State
      attr_reader :transitions, :checkpoint_actions, :action

      def initialize(state_machine, action = nil)
        @state_machine = state_machine
        @transitions = {}
        @checkpoint_actions = []
        @action = action
      end

      # Truthy when an accept action has been attached.
      def accepting?
        @action
      end

      # Runs every checkpoint action against +lexer_run+, then the accept
      # action if this state is accepting.
      def process(lexer_run)
        checkpoint_actions.each { |checkpoint| checkpoint.call(lexer_run) }
        action.call(lexer_run) if accepting?
      end

      # Compiled-lexer DSL: evaluates +blk+ in the context of the owning
      # state machine to obtain the destination state, then wires a
      # transition to it for each character given.
      def for_characters(*characters, &blk)
        destination = @state_machine.instance_eval(&blk)
        characters.each { |character| transitions[character] = destination }
      end

      def add_checkpoint(pattern)
        checkpoint_actions << LexerSupport::CheckpointAction.new(pattern)
      end

      # Marks this state as accepting +pattern+.
      def accept(pattern)
        @action = AcceptAction.new(pattern)
      end

      # Marks this state as accepting +pattern+ after a lookahead match.
      def accept_with_lookahead(pattern)
        @action = LookaheadAcceptAction.new(pattern)
      end

      def recognize(pattern)
        @pattern = pattern
      end

      # Emits the Ruby source fragment for this state as written into
      # compiled lexers, grouping transitions by destination state.
      def compile_to_ruby_source
        source = " at_state(#{object_id}) {\n"
        source << " #{action.compile_to_ruby_source}\n" if action
        checkpoint_actions.each do |checkpoint|
          source << " #{checkpoint.compile_to_ruby_source}\n"
        end
        keys_by_destination = Hash.new { |hash, key| hash[key] = [] }
        transitions.each do |character, destination|
          keys_by_destination[destination.object_id] << character
        end

        keys_by_destination.each do |state_id, characters|
          key_list = characters.collect { |character| character.inspect }.join(', ')
          source << " for_characters(#{key_list}) { switch_to #{state_id} }\n"
        end

        source << " }"
        source
      end
    end

  end
end
@@ -0,0 +1,37 @@
module Dhaka
  module LexerSupport
    # Template for state-machine construction. Subclasses supply
    # new_state_for_key, transition_characters and dest_key_for; states are
    # materialized lazily through the hash default block, so cycles between
    # states resolve naturally.
    class StateMachine
      attr_reader :start_state

      def initialize(start_key)
        @states = Hash.new do |states, key|
          state = new_state_for_key(key)
          # Register before wiring transitions so self-references terminate.
          states[key] = state
          transition_characters(key).each do |character|
            state.transitions[character] = states[dest_key_for(key, character)]
          end
          state
        end
        @start_state = @states[start_key]
      end

      # The dot (Graphviz) representation of the whole machine; accepting
      # states are drawn as double circles.
      def to_dot
        Dot::Digraph.new(:fontsize => 10, :shape => :circle, :size => 5) do |graph|
          entry = 'Start'
          graph.node(entry, :label => entry)
          graph.edge(entry, @start_state)
          @states.values.each do |state|
            attributes = {}
            attributes.merge!(:shape => :doublecircle, :label => state.action.to_dot) if state.accepting?
            graph.node(state, attributes)
            state.transitions.each do |character, destination|
              graph.edge(state, destination, :label => character.inspect)
            end
          end
        end.to_dot
      end
    end
  end
end
@@ -0,0 +1,55 @@
module Dhaka
  # Base class for the executable actions stored in parser tables.
  class Action #:nodoc:
    # Proc executed in the context of a parser run when the action fires.
    attr_reader :action_code
  end

  # Pushes the destination state onto the parser run's state stack.
  class ShiftAction < Action #:nodoc:
    attr_reader :destination_state

    def initialize(destination_state)
      @destination_state = destination_state
      # Evaluated in a parser-run context where state_stack is available;
      # returns an empty symbol queue.
      @action_code = proc do
        state_stack << destination_state
        []
      end
    end

    def compile_to_ruby_source
      "shift_to #{destination_state.id}"
    end

    def to_s
      "Shift"
    end
  end

  # Pops the production's expansion off the stacks, builds a composite
  # parse-tree node and runs the production's semantic action on it.
  class ReduceAction < Action #:nodoc:
    attr_reader :production

    def initialize(production)
      @production = production
      # Evaluated in a parser-run context where state_stack, node_stack,
      # @symbol_queue and @current_token are available.
      @action_code = proc do
        composite_node = ParseTreeCompositeNode.new(production)

        # One stack entry per symbol in the expansion; unshift preserves
        # the original left-to-right child order.
        production.expansion.each do
          state_stack.pop
          composite_node.child_nodes.unshift(node_stack.pop)
        end

        node_stack << composite_node.instance_eval(&production.action)

        unless composite_node.head_node?
          @symbol_queue.concat [@current_token.symbol_name, production.symbol.name]
        end
      end
    end

    def compile_to_ruby_source
      "reduce_with #{production.name.inspect}"
    end

    def to_s
      "Reduce with #{production}"
    end
  end
end
@@ -0,0 +1,58 @@
module Dhaka
  # A channel pumps lookahead symbols from one parser item to another.
  class Channel #:nodoc:
    attr_reader :start_item, :end_item

    def initialize(grammar, start_item, end_item)
      @grammar = grammar
      @start_item = start_item
      @end_item = end_item
    end

    # Merges +cargo+ into the end item's lookahead set. Returns true when
    # the merge actually grew the set, i.e. something new was propagated.
    def propagate(cargo)
      size_before = end_item.lookaheadset.size
      end_item.lookaheadset.merge(cargo)
      end_item.lookaheadset.size > size_before
    end

    def to_s
      "Channel from #{start_item} to #{end_item}"
    end

    # Channels are equal when they connect the same pair of items.
    def eql?(other)
      start_item.eql?(other.start_item) && end_item.eql?(other.end_item)
    end

    def hash
      start_item.hash ^ end_item.hash
    end
  end

  # Pumps the FIRST sets of the symbols trailing the start item's dot,
  # falling through to the item's own lookaheads when every trailing symbol
  # is nullable.
  class SpontaneousChannel < Channel #:nodoc:
    def to_s
      "Spontaneous " + super
    end

    def pump
      cargo = Set.new
      follow_index = start_item.next_item_index + 1
      while (follow_symbol = start_item.production.expansion[follow_index])
        cargo.merge(@grammar.first(follow_symbol))
        return propagate(cargo) unless follow_symbol.nullable
        follow_index += 1
      end
      cargo.merge(start_item.lookaheadset)
      propagate(cargo)
    end
  end

  # Forwards the start item's current lookahead set unchanged.
  class PassiveChannel < Channel #:nodoc:
    def to_s
      "Passive " + super
    end

    def pump
      propagate(start_item.lookaheadset)
    end
  end
end
@@ -0,0 +1,51 @@
module Dhaka
  # Abstract base class of all compiled (ahead-of-time generated) parsers.
  # Generated parser files subclass this and replay their parse tables
  # through the class-level DSL below (+at_state+, +start_with+, +shift_to+,
  # +reduce_with+). It is only used by generated code.
  class CompiledParser

    class << self
      private
      # Sets up per-subclass storage. States and actions live in
      # self-populating hashes so that forward references made while the
      # generated table is replayed resolve lazily.
      def inherited(compiled_parser)
        class << compiled_parser
          attr_accessor :states, :grammar, :start_state_id, :shift_actions, :reduce_actions
        end
        compiled_parser.states = Hash.new do |hash, state_id|
          hash[state_id] = ParserState.new(compiled_parser, {}, state_id)
        end
        compiled_parser.shift_actions = Hash.new do |hash, state_id|
          hash[state_id] = ShiftAction.new(compiled_parser.states[state_id])
        end
        compiled_parser.reduce_actions = Hash.new do |hash, production_name|
          hash[production_name] = ReduceAction.new(compiled_parser.grammar.production_named(production_name))
        end
      end

      # Evaluates a generated table entry in the context of state +x+.
      def at_state(x, &blk)
        states[x].instance_eval(&blk)
      end

      def start_state
        states[start_state_id]
      end

      def start_with(start_state_id)
        self.start_state_id = start_state_id
      end

      # Fix: consult the memoizing +reduce_actions+ cache. It was initialized
      # in +inherited+ but never used, so every call allocated a fresh
      # ReduceAction for the same production.
      def reduce_with(production_name)
        reduce_actions[production_name]
      end

      # Fix: likewise reuse cached ShiftAction instances via +shift_actions+
      # instead of allocating a new one per table entry.
      def shift_to(state_id)
        shift_actions[state_id]
      end

      def inspect
        "<Dhaka::CompiledParser grammar : #{grammar}>"
      end
    end

    extend(ParserMethods)

  end

end
@@ -0,0 +1,54 @@
module Dhaka
  # Shift-reduce and reduce-reduce parser conflicts, with the logic for
  # resolving them.
  class Conflict #:nodoc:
    def initialize(state, lookahead, new_action)
      @state = state
      @lookahead = lookahead
      @new_action = new_action
    end

    # Human-readable description of the clash between the action already in
    # the table and the newly proposed one.
    def build_conflict_message
      [
        "Parser Conflict at State:",
        @state.to_s(:hide_lookaheads => true),
        "Existing: #{@state.actions[@lookahead.name]}",
        "New: #{@new_action}",
        "Lookahead: #{@lookahead}"
      ].join("\n")
    end
  end


  # Reduce-reduce conflicts are reported but never resolved automatically:
  # the existing action stays in place.
  class ReduceReduceConflict < Conflict #:nodoc:
    def resolve
      build_conflict_message
    end
  end

  # Shift-reduce conflicts resolve by precedence, then associativity; with
  # no precedence rule, shift wins. Returns the log of what was decided.
  class ShiftReduceConflict < Conflict #:nodoc:

    def resolve
      messages = [build_conflict_message]
      shift_precedence = @lookahead.precedence
      reduce_precedence = @new_action.production.precedence
      if shift_precedence && reduce_precedence
        if shift_precedence > reduce_precedence
          messages << "Resolving with precedence. Choosing shift over reduce."
        elsif shift_precedence < reduce_precedence
          messages << "Resolving with precedence. Choosing reduce over shift."
          @state.actions[@lookahead.name] = @new_action
        else
          case shift_precedence.associativity
          when :left
            messages << "Resolving with left associativity. Choosing reduce over shift."
            @state.actions[@lookahead.name] = @new_action
          when :right
            messages << "Resolving with right associativity. Choosing shift over reduce."
          when :nonassoc
            messages << "Resolving with non-associativity. Eliminating action."
            @state.actions.delete(@lookahead.name)
          end
        end
      else
        messages << "No precedence rule. Choosing shift over reduce."
      end
      messages.join("\n")
    end
  end
end
@@ -0,0 +1,42 @@
module Dhaka
  # A dotted production (LR item): a production plus the index of the next
  # symbol to be matched, together with the item's lookahead set.
  class Item #:nodoc:
    attr_reader :production, :next_item_index, :lookaheadset

    def initialize(production, next_item_index)
      @production = production
      @next_item_index = next_item_index
      @lookaheadset = Set.new
    end

    # The grammar symbol immediately after the dot (nil when the dot is at
    # the end of the expansion).
    def next_symbol
      production.expansion[next_item_index]
    end

    # A fresh item with the dot advanced one position (lookaheads start empty).
    def next_item
      Item.new(production, @next_item_index + 1)
    end

    # Renders the item as "SYMBOL ::= a -> b [lookaheads]", where '->' marks
    # the dot position.
    def to_s(options = {})
      symbol_names = production.expansion.collect { |symbol| symbol.name }
      if next_item_index < symbol_names.size
        symbol_names.insert(next_item_index, '->')
      else
        symbol_names << '->'
      end
      text = "#{production.symbol} ::= #{symbol_names.join(' ')}"
      text << " [#{lookaheadset.collect.sort}]" unless options[:hide_lookaheads]
      text
    end

    # Items compare by production and dot position; lookaheads are
    # deliberately ignored so LALR state merging works.
    def eql?(other)
      production == other.production && next_item_index == other.next_item_index
    end

    def hash
      production.hash ^ next_item_index.hash
    end
  end
end
@@ -0,0 +1,50 @@
module Dhaka
  # Returned on successful parsing of the input token stream. Delegates to
  # the root ParseTreeCompositeNode of the resulting parse tree.
  class ParseSuccessResult < DelegateClass(ParseTreeCompositeNode)
    # The root node of the parse tree.
    attr_accessor :parse_tree

    def initialize(parse_tree) #:nodoc:
      super(parse_tree)
      @parse_tree = parse_tree
    end

    # Always false for a successful parse.
    def has_error?
      false
    end

    # The dot (Graphviz) representation of the parse tree.
    def to_dot
      Dot::Digraph.new(:fontsize => 10, :shape => :box, :size => 5) do |graph|
        parse_tree.to_dot(graph)
      end.to_dot
    end

    # Deprecated. Use the +parse_tree+ accessor.
    alias syntax_tree parse_tree
  end

  # Returned on unsuccessful parsing of the input token stream.
  class ParseErrorResult
    attr_reader :unexpected_token, :parser_state

    def initialize(unexpected_token, parser_state) #:nodoc:
      @unexpected_token = unexpected_token
      @parser_state = parser_state
    end

    # Always true for a failed parse.
    def has_error?
      true
    end

    def inspect #:nodoc:
      "<Dhaka::ParseErrorResult unexpected_token=#{unexpected_token.inspect}>"
    end
  end
end
@@ -0,0 +1,66 @@
module Dhaka
  # Composite (interior) node of the syntax tree returned by a successful
  # parse — one node per production application.
  class ParseTreeCompositeNode
    attr_reader :production, :child_nodes

    def initialize(production) #:nodoc:
      @production = production
      @child_nodes = []
    end

    # Post-order flattening: children first, then this node. Leaf nodes
    # contribute nothing (see ParseTreeLeafNode#linearize).
    def linearize #:nodoc:
      child_nodes.inject([]) { |nodes, child| nodes + child.linearize } + [self]
    end

    # All tokens under this node, left to right.
    def tokens
      child_nodes.inject([]) { |collected, child| collected + child.tokens }
    end

    def to_s #:nodoc:
      "CompositeNode: #{production.symbol} --> [#{child_nodes.join(", ")}]"
    end

    # Writes this node and its subtree into the dot +graph+.
    def to_dot(graph)
      graph.node(self, :label => production)
      child_nodes.each do |child|
        graph.edge(self, child)
        child.to_dot(graph)
      end
    end

    # True only for the root produced by the grammar's start symbol.
    def head_node? #:nodoc:
      production.symbol.name == START_SYMBOL_NAME
    end

  end

  # Leaf node of the syntax tree; wraps a single token.
  class ParseTreeLeafNode
    attr_reader :token

    def initialize(token) #:nodoc:
      @token = token
    end

    # Leaves are excluded from linearization.
    def linearize #:nodoc:
      []
    end

    def tokens
      [token]
    end

    def to_s #:nodoc:
      "LeafNode: #{token}"
    end

    # Writes this leaf into the dot +graph+.
    def to_dot(graph)
      graph.node(self, :label => token)
    end

    def head_node? #:nodoc:
      false
    end
  end
end