keyword_search 1.0.6 → 1.1.0

data/History.txt CHANGED
@@ -1,3 +1,7 @@
+ = 1.1.0 / 2007-03-21
+
+ * Updated to use Dhaka 2.1.0
+
  = 1.0.5 / 2007-01-29
 
  * Added single quoting support and nested quotes (apostrophes, etc)
data/Rakefile CHANGED
@@ -12,7 +12,7 @@ Hoe.new('keyword_search', KeywordSearch::VERSION) do |p|
    p.author = 'Bruce Williams'
    p.email = 'bruce@codefluency.com'
    p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
-   p.extra_deps = [['dhaka', '= 0.0.6']]
+   p.extra_deps = [['dhaka', '= 2.1.0']]
  end
 
  task :rebuild_parser do
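
For readers who think in plain gemspecs rather than Hoe, the extra_deps change above pins the runtime dependency on Dhaka. A rough gemspec-style equivalent is sketched below; the dependency name and version come from this diff, but the gemspec form itself is an assumption about what Hoe generates:

    # Sketch: roughly what the Hoe extra_deps setting above amounts to in a
    # generated gemspec (not the gem's actual build output).
    Gem::Specification.new do |s|
      s.name    = 'keyword_search'
      s.version = '1.1.0'
      # Hard-pin the Dhaka parser toolkit to the version this release targets.
      s.add_dependency 'dhaka', '= 2.1.0'
    end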
@@ -7,15 +7,16 @@ end
 
  module KeywordSearch
 
-   VERSION = '1.0.6'
+   VERSION = '1.1.0'
 
    class << self
      def search(input_string, definition=nil, &block)
+       @evaluator ||= Evaluator.new
        definition ||= Definition.new(&block)
        tokens = Tokenizer.tokenize(input_string.downcase)
        parse_result = Parser.parse(tokens)
        unless parse_result.has_error?
-         results = Evaluator.new(parse_result.syntax_tree).result
+         results = @evaluator.evaluate(parse_result.parse_tree)
          results.each do |key, terms|
            definition.handle(key, terms)
          end
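
For context, the hunk above memoizes a single Evaluator and switches from the 1.x syntax_tree/result API to Dhaka 2.1.0's evaluate(parse_tree). A minimal usage sketch of the entry point it changes follows; only KeywordSearch.search, Definition.new(&block), and definition.handle(key, terms) are visible in this diff, so the `keyword` registration method used below is an assumption about the Definition block API:

    require 'keyword_search'

    # Sketch: extract GMail-style key:value pairs from a query string.
    # `with.keyword` is assumed; the diff only shows that the block builds a
    # Definition whose #handle(key, terms) is called per extracted keyword.
    KeywordSearch.search(%(author:"bruce williams" dhaka)) do |with|
      with.keyword :author do |terms|
        # terms is the array of values collected for author:, e.g. ["bruce williams"]
        puts terms.inspect
      end
    end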
@@ -7,23 +7,25 @@ module KeywordSearch
    define_evaluation_rules do
 
      for_multiple_pairs do
-       child_nodes[1].each do |key, terms|
-         child_nodes[0][key] ||= []
-         child_nodes[0][key] += terms
+       child_nodes.inject({}) do |result,child_node|
+         evaluate(child_node).each do |key,value|
+           result[key] ||= []
+           result[key] += value
+         end
+         result
        end
-       child_nodes[0]
      end
 
      for_one_pair do
-       child_nodes[0]
+       evaluate(child_nodes.first)
      end
 
      for_keyword_and_term do
-       {child_nodes[0].token.value => [child_nodes[1].token.value]}
+       {child_nodes.first.tokens.first.value => [child_nodes.last.tokens.first.value]}
      end
 
      for_default_keyword_term do
-       {:default => [child_nodes[0].token.value]}
+       {:default => [child_nodes[0].tokens[0].value]}
      end
 
    end
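
The rewritten for_multiple_pairs rule now evaluates each child node and folds the per-pair hashes into a single key => [terms] hash. Outside the Dhaka DSL, the same merge looks like this (plain Ruby with made-up data):

    # Plain-Ruby illustration of the inject-based merge above (hypothetical data).
    pair_results = [
      { :author  => ['bruce'] },
      { :default => ['dhaka'] },
      { :author  => ['williams'] }
    ]

    merged = pair_results.inject({}) do |result, pair|
      pair.each do |key, terms|
        result[key] ||= []    # first time this keyword is seen
        result[key] += terms  # append, preserving order
      end
      result
    end

    merged # => {:author=>["bruce", "williams"], :default=>["dhaka"]}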
@@ -4,46 +4,38 @@ class KeywordSearch::Parser < Dhaka::CompiledParser
 
    start_with 0
 
-   at_state(5) {
-     for_symbol('k') { shift_to 2 }
-     for_symbol('_End_') { reduce_with 'start' }
-     for_symbol('Pair') { shift_to 6 }
-     for_symbol('s') { shift_to 1 }
-   }
-
-   at_state(2) {
-     for_symbol('s') { shift_to 3 }
+   at_state(3) {
+     for_symbols("k", "_End_", "s") { reduce_with "one_pair" }
    }
 
-   at_state(3) {
-     for_symbol('k') { reduce_with 'keyword_and_term' }
-     for_symbol('_End_') { reduce_with 'keyword_and_term' }
-     for_symbol('s') { reduce_with 'keyword_and_term' }
+   at_state(1) {
+     for_symbols("s") { shift_to 2 }
    }
 
    at_state(6) {
-     for_symbol('k') { reduce_with 'multiple_pairs' }
-     for_symbol('_End_') { reduce_with 'multiple_pairs' }
-     for_symbol('s') { reduce_with 'multiple_pairs' }
+     for_symbols("k", "_End_", "s") { reduce_with "multiple_pairs" }
    }
 
-   at_state(4) {
-     for_symbol('k') { reduce_with 'one_pair' }
-     for_symbol('_End_') { reduce_with 'one_pair' }
-     for_symbol('s') { reduce_with 'one_pair' }
+   at_state(5) {
+     for_symbols("s") { shift_to 4 }
+     for_symbols("_End_") { reduce_with "start" }
+     for_symbols("k") { shift_to 1 }
+     for_symbols("Pair") { shift_to 6 }
    }
 
-   at_state(1) {
-     for_symbol('k') { reduce_with 'default_keyword_term' }
-     for_symbol('_End_') { reduce_with 'default_keyword_term' }
-     for_symbol('s') { reduce_with 'default_keyword_term' }
+   at_state(2) {
+     for_symbols("k", "_End_", "s") { reduce_with "keyword_and_term" }
    }
 
    at_state(0) {
-     for_symbol('k') { shift_to 2 }
-     for_symbol('Pairs') { shift_to 5 }
-     for_symbol('Pair') { shift_to 4 }
-     for_symbol('s') { shift_to 1 }
+     for_symbols("s") { shift_to 4 }
+     for_symbols("k") { shift_to 1 }
+     for_symbols("Pair") { shift_to 3 }
+     for_symbols("Pairs") { shift_to 5 }
+   }
+
+   at_state(4) {
+     for_symbols("k", "_End_", "s") { reduce_with "default_keyword_term" }
    }
 
  end
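
The parser table was regenerated for Dhaka 2.1.0 (for_symbol became for_symbols and the state numbering changed), but it still encodes the same small grammar. A sketch of that grammar in Dhaka's grammar DSL, inferred from the reduce actions above; the gem's actual grammar file is not part of this diff:

    # Inferred grammar (reading aid only): a query is one or more Pairs, and a
    # Pair is either "keyword:term" (k s) or a bare term (s) collected as :default.
    class KeywordSearch::Grammar < Dhaka::Grammar
      for_symbol(Dhaka::START_SYMBOL_NAME) do
        start %w| Pairs |
      end

      for_symbol('Pairs') do
        multiple_pairs %w| Pairs Pair |
        one_pair       %w| Pair |
      end

      for_symbol('Pair') do
        keyword_and_term     %w| k s |
        default_keyword_term %w| s |
      end
    end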
@@ -1,6 +1,18 @@
  module KeywordSearch
 
    class Tokenizer < Dhaka::Tokenizer
+
+     def accumulator
+       @accumulator ||= ''
+     end
+
+     def accumulate(string)
+       accumulator << string
+     end
+
+     def clear_accumulator
+       @accumulator = ''
+     end
 
 
      # TODO: Add further character support; this is just for initial release
@@ -16,13 +28,13 @@ module KeywordSearch
      for_state :idle_state do
 
        for_characters(printables) do
-         self.accumulator = ''
+         clear_accumulator
          switch_to :unquoted_literal_state
        end
 
        for_characters(quotes) do
-         advance if self.accumulator && !self.accumulator.empty?
-         self.accumulator = ''
+         advance unless accumulator.empty?
+         clear_accumulator
          case curr_char
          when %<">
            advance
@@ -42,21 +54,21 @@ module KeywordSearch
      for_state :unquoted_literal_state do
 
        for_characters(printables) do
-         self.accumulator += curr_char
+         accumulate curr_char
          advance
-         tokens << Dhaka::Token.new(Grammar.symbol_for_name('s'), accumulator) unless curr_char
+         create_token('s', accumulator) unless curr_char
        end
 
        for_characters(keyword_separator) do
-         tokens << Dhaka::Token.new(Grammar.symbol_for_name('k'), self.accumulator)
-         self.accumulator = ''
+         create_token 'k', accumulator
+         clear_accumulator
          advance
          switch_to :idle_state
        end
 
        for_characters(whitespace) do
-         tokens << Dhaka::Token.new(Grammar.symbol_for_name('s'), self.accumulator)
-         self.accumulator = ''
+         create_token 's', accumulator
+         clear_accumulator
          switch_to :idle_state
        end
 
@@ -64,12 +76,12 @@ module KeywordSearch
 
      for_state :double_quoted_literal_state do
        for_characters(all_characters - %w<">) do
-         self.accumulator += curr_char
+         accumulate curr_char
          advance
        end
        for_characters %w<"> do
-         tokens << Dhaka::Token.new(Grammar.symbol_for_name('s'), self.accumulator)
-         self.accumulator = ''
+         create_token 's', accumulator
+         clear_accumulator
          advance
          switch_to :idle_state
        end
@@ -77,12 +89,12 @@ module KeywordSearch
 
      for_state :single_quoted_literal_state do
        for_characters(all_characters - %w<'>) do
-         self.accumulator += curr_char
+         accumulate curr_char
          advance
        end
        for_characters %w<'> do
-         tokens << Dhaka::Token.new(Grammar.symbol_for_name('s'), self.accumulator)
-         self.accumulator = ''
+         create_token 's', accumulator
+         clear_accumulator
          advance
          switch_to :idle_state
        end
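
The tokenizer hunks above replace direct @accumulator manipulation with three small helpers (accumulator, accumulate, clear_accumulator) and swap the old tokens << Dhaka::Token.new(...) calls for Dhaka 2.1.0's create_token. Stripped of the tokenizer DSL, the accumulator pattern is just this (standalone sketch with a hypothetical class name):

    # Standalone sketch of the accumulator helpers; the real methods live in
    # KeywordSearch::Tokenizer and are driven by the for_state/for_characters DSL.
    class CharacterAccumulator
      def accumulator
        @accumulator ||= ''   # lazily initialized, never nil
      end

      def accumulate(string)
        accumulator << string # append the current character
      end

      def clear_accumulator
        @accumulator = ''     # reset between tokens
      end
    end

    buf = CharacterAccumulator.new
    'abc'.each_char { |c| buf.accumulate(c) }
    buf.accumulator       # => "abc"
    buf.clear_accumulator
    buf.accumulator       # => ""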
metadata CHANGED
@@ -3,8 +3,8 @@ rubygems_version: 0.9.0
  specification_version: 1
  name: keyword_search
  version: !ruby/object:Gem::Version
-   version: 1.0.6
- date: 2007-02-26 00:00:00 -07:00
+   version: 1.1.0
+ date: 2007-03-21 00:00:00 -06:00
  summary: Generic support for extracting GMail-style search keywords/values from strings
  require_paths:
  - lib
@@ -60,5 +60,14 @@ dependencies:
      requirements:
      - - "="
        - !ruby/object:Gem::Version
-         version: 0.0.6
+         version: 2.1.0
+   version:
+ - !ruby/object:Gem::Dependency
+   name: hoe
+   version_requirement:
+   version_requirements: !ruby/object:Gem::Version::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 1.2.0
    version: