search_lingo 1.0.3 → 2.0.0.pre1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 473413fa017d4fe273ea20952b89c778fd07d98735efaeef1c28954bc70c96ea
-   data.tar.gz: 11df6b8d3f02b8147afc5a7248a548861b038c8e7e9dffd854dca506e185fa7e
+   metadata.gz: e2e54fb0b61c4208e61b6edec4936b1fae5d37c9872c775ed2091f1ebbb0806f
+   data.tar.gz: 3d0f886c18e7f7acf5725d4c1101acd277805e8b8d02eaf0c524bd6f41b222cb
  SHA512:
-   metadata.gz: a6783a0cb4a8d8db96ddaf4a9cff6704484a6421f93351de009202a7f680c46a50d44c62672e7201a7cdaeb3999ac9df7646b1905471cfef43ef2c9c2e6ef87d
-   data.tar.gz: bed0ac621a52ffc67fc50ee32f5dd560f58a3f11444741bc3bae63a9be8f1d0e7e59e0656b5d1cab07cc2a05c1c200889d408c1c7142ceab31416a156edc9d38
+   metadata.gz: bee8e661b253b3e356b14d0a587ea019d4f8f9bcebf3090d7fbb6e12d410ddd85e29ceecaa41735a3b510a924ac119cd770500ad6c9828992c026b0e81e59bd2
+   data.tar.gz: ac76a97eb3ec3fd5c42c975462642b2d35366f7e29a1be7c22c32cf07f8f0442b9e79c7a4d4966826be56b6143cb293d8dd5831201166ff94d4b4fe5e72be579
data/README.md CHANGED
@@ -15,6 +15,52 @@ project. Although originally designed to work with basic searching with
  ActiveRecord models, it should be usable with other data stores provided they
  let you build complex queries by chaining together simpler queries.
 
+ ## Upgrading
+
+ Version 2 introduces a breaking change to the parsing workflow. In older
+ versions, parsers were sent one argument (the token), and were expected to
+ return an array that would be sent to the scope using `#public_send`. The new
+ version sends the token and the filter chain to the parsers, and they are
+ expected to append methods to the filter chain and return the result. This
+ change makes it possible for parsers to make more than one addition to the
+ filter chain.
+
+ After upgrading, update your parsers as follows:
+
+ ```ruby
+ # Before
+ parser do |token|
+   if token.modifier == 'something'
+     [:where, { column: token.term }]
+   end
+ end
+
+ # After
+ parser do |token, chain|
+   if token.modifier == 'something'
+     chain.where column: token.term
+   end
+ end
+ ```
+
+ Similar changes will need to be made to your `#default_parse` implementation.
+
+ ```ruby
+ # Before
+ def default_parse(token)
+   [:where, { column: token }]
+ end
+
+ # After
+ def default_parse(token, chain)
+   chain.where column: token
+ end
+ ```
+
+ If you provided your own implementation of `#scope` in your search class to
+ ensure that certain relations were joined, you may want to revisit that
+ decision: the joins can now be added only when a particular parser needs them.
+
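Because parsers now receive the filter chain itself, a single parser can chain more than one call before returning. A minimal sketch of such a parser (the `assignee` association and `users` table are assumptions made for illustration):

```ruby
parser do |token, chain|
  if token.modifier == 'assignee'
    # One parser, two additions to the chain: a join plus a condition.
    chain.joins(:assignee).where(users: { login: token.term })
  end
end
```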
  ## Installation
 
  Add this line to your application's Gemfile:
@@ -41,17 +87,16 @@ class Task < ActiveRecord::Base
  end
 
  class TaskSearch < SearchLingo::AbstractSearch
-   def default_parse(token)
-     [:where, 'tasks.name LIKE ?', "%#{token}%"]
+   def default_parse(token, chain)
+     chain.where 'name LIKE ?', "%#{token}%"
    end
  end
 
- TaskSearch.new('foo bar', Task).results
- # => Task.where('tasks.name LIKE ?', '%foo%')
- # -> .where('tasks.name LIKE ?', '%bar%')
+ TaskSearch.new('foo bar', Task.all).results
+ # => Task.where('name LIKE ?', '%foo%').where('name LIKE ?', '%bar%')
 
- TaskSearch.new('"foo bar"', Task).results
- # => Task.where('tasks.name LIKE ?', '%foo bar%')
+ TaskSearch.new('"foo bar"', Task.all).results
+ # => Task.where('name LIKE ?', '%foo bar%')
  ```
 
  And here is a more complex example.
@@ -68,91 +113,96 @@ end
  class Task < ActiveRecord::Base
    belongs_to :category
    belongs_to :user
-   enum state: [:incomplete, :complete]
+   enum state: %i[incomplete complete]
  end
 
  class TaskSearch < SearchLingo::AbstractSearch
-   parser do |token|
-     token.match /\Acategory:\s*"?(.*?)"?\z/ do |m|
-       [:where, { categories: { name: m[1] } }]
+   parser do |token, chain|
+     token.match(/\Ais:\s*(?<state>(?:in)?complete)\z/) do |m|
+       # Appends a named scope defined by `enum` to the filter chain.
+       chain.public_send m[:state].to_sym
      end
    end
 
-   parser do |token|
-     token.match /\Ais:\s*(?<state>(?:in)?complete)\z/ do |m|
-       [m[:state].to_sym]
+   parser do |token, chain|
+     if token.modifier == 'cat'
+       # Appends a join and a where clause to the filter chain.
+       chain.joins(:category).where categories: { name: token.term }
      end
    end
 
-   parser do |token|
-     token.match /\A([<>])([[:digit:]]+)\z/ do |m|
-       [:where, 'tasks.priority #{m[1]} ?', m[2]]
+   parser do |token, chain|
+     token.match(/\A(?<op>[<>])(?<prio>[[:digit:]]+)\z/) do |m|
+       priority = Task.arel_table[:priority]
+       if m[:op] == '<'
+         chain.where priority.lt m[:prio]
+       else
+         chain.where priority.gt m[:prio]
+       end
      end
    end
 
-   def default_parse(token)
-     [:where, 'tasks.name LIKE ?', "%#{token}%"]
-   end
-
-   def scope
-     @scope.includes(:category).references(:category)
+   def default_parse(token, chain)
+     chain.where Task.arel_table[:name].matches "%#{token}%"
    end
  end
 
- TaskSearch.new('category: "foo bar" <2 baz is: incomplete', Task).results
- # => Task.includes(:category).references(:category)
- # -> .where(categories: { name: 'foo bar' })
- # -> .where('tasks.priority < ?', 2)
- # -> .where('tasks.name LIKE ?', '%baz%')
+ TaskSearch.new('cat: foo <2 "bar baz" is: incomplete', Task.all).results
+ # => Task.all
+ # -> .joins(:category)
+ # -> .where(categories: { name: 'foo' })
+ # -> .where(Task.arel_table[:priority].lt(2))
+ # -> .where(Task.arel_table[:name].matches('%bar baz%'))
  # -> .incomplete
 
- TaskSearch.new('category: "foo bar"', User.find(42).tasks).results
- # => Task.includes(:category).references(:category)
- # -> .where(user_id: 42)
- # -> .where(categories: { name: 'foo bar' })
+ user = User.find 42
+ TaskSearch.new('is: complete "foo bar"', user.tasks).results
+ # => user.tasks.complete.where(Task.arel_table[:name].matches('%foo bar%'))
  ```
 
- Create a class which inherits from `SearchLingo::AbstractSearch`. Provide an
- implementation of `#default_parse` in that class. Register parsers for specific
- types of search tokens using the parser class method.
+ A search class should inherit from `SearchLingo::AbstractSearch`, and it should
+ provide its own implementation of `#default_parse`. Register additional parsers
+ with `.parser` as needed.
 
- Instantiate your search class by passing in the query string and the scope on
- which to perform the search. Use the `#results` method to compile and execute
- the search and return the results.
+ Instantiate your search class with a query string and the scope on which to
+ search. Send that instance `#results` to compile and execute the search and
+ return the results.
 
  ## How It Works
 
- A search is instantiated with a query string and a search scope (commonly an
+ A search is instantiated with a query string and a search scope (such as an
  ActiveRecord model). The search breaks the query string down into a series of
- tokens, and each token is processed by a declared series of parsers. If a
- parser succeeds, processing immediately advances to the next token. If none of
- the declared parsers succeeds, and the token is compound — that is, the token
- is composed of a modifier and a term (e.g., `foo: bar`), the token is
- simplified and then processed by the declared parsers again. If the second pass
- also fails, then the (now simplified) token falls through to the
- `#default_parse` method defined by the search class. This method should be
- implemented in such a way that it always "succeeds" always returning a Symbol
- or an Array that can be splatted and sent to the search scope.
+ tokens and parses them, composing the search query by chaining method calls
+ onto the initial search scope.
+
+ A search class registers zero or more special-case parsers. Processing of each
+ token runs through the parsers in the order in which they were registered.
+ Parsing of a single token halts when a parser succeeds. When a parser succeeds,
+ it should append to the search scope a method call which implements the filter
+ for the given token. When a parser fails, it should return `nil` or `false`.
+
+ If all of the registered parsers fail and the token is compound, it is
+ simplified and reprocessed by the same set of parsers (see "Tokenization" for
+ more information).
+
+ If still no parser has successfully parsed the token, the search falls back on
+ `#default_parse`.
 
  ## Search Classes
 
- Search classes should inherit from `SearchLingo::AbstractSearch`, and they must
- provide their own implementation of `#default_parse`. Optionally, a search
- class may also use the parse class method to add specialized parsers for
- handling tokens that match specific patterns. As each token is processed, the
- search class will first run through the specialized parsers. If none of them
- succeed, it will fall back on the `#default_parse` method. See the section
- "Parsing" for more information on how parsers work and how they should be
- structured.
+ Search classes should inherit from `SearchLingo::AbstractSearch`. They must
+ provide their own implementation of `#default_parse`, which should, at a
+ minimum, return the current filter chain. Custom parsers can be registered
+ with the `.parser` class method. Custom parsers are tried in the order in
+ which they are defined, so bear that order in mind when defining them.
 
  ## Tokenization
 
  Queries are comprised of zero or more tokens separated by white space. A token
- has a term and an optional modifier. (A simple token has no modifier; a
- compound token does.) A term can be a single word or multiple words joined by
- spaces and contained within double quotes. For example `foo` and `"foo bar
- baz"` are both single terms. A modifier is one or more alphanumeric characters
- followed by a colon and zero or more spaces.
+ is an optional modifier followed by a term. A modifier is one or more
+ alphanumeric characters and is followed by a colon. A term can be a single word
+ or multiple words contained within double quotes (both `foo` and `"foo bar
+ baz"` are valid single terms).
 
      QUERY := TOKEN*
      TOKEN := (MODIFIER ':' [[:space:]]*)? TERM
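A compound token such as `cat: "foo bar"` carries both a modifier and a term, while a simple token has only a term. A short sketch of inspecting tokens directly (it assumes `SearchLingo::Tokenizer` is constructed with the raw query string, as the abstract search class does internally):

```ruby
require 'search_lingo/tokenizer'

tokenizer = SearchLingo::Tokenizer.new 'cat: "foo bar" <2 baz'

tokenizer.each do |token|
  # Each token exposes its modifier (nil for simple tokens), its term,
  # and whether it is compound.
  printf "modifier=%p term=%p compound=%p\n",
         token.modifier, token.term, token.compound?
end
```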
@@ -190,19 +240,19 @@ token.match(/\Afoo:\s*"?(.+?)"?\z/) { |m| m[1] } # => 'bar baz'
 
  ## Parsers
 
- Any object that can respond to the `#call` method can be used as a parser. If
- the parser succeeds, it should return an Array of arguments that can be sent to
- the query object using `#public_send`, e.g., `[:where, { id: 42 }]`. If the
- parser fails, it should return a falsey value.
+ Any object that responds to `#call` can be used as a parser. It will be sent
+ two arguments: the token and the current filter chain. If a parser succeeds, it
+ should append one or more methods to the filter chain and return the result. If
+ a parser fails, it should return a falsey value (usually `nil`).
 
  For very simple parsers which need not be reusable, you can pass the parsing
  logic to the parser method as a block:
 
  ```ruby
  class MySearch < SearchLingo::AbstractSearch
-   parser do |token|
-     token.match /\Aid:[[:space:]]*([[:digit:]]+)\z/ do |m|
-       [:where, { id: m[1] }]
+   parser do |token, chain|
+     token.match(/\Aid:[[:space:]]*([[:digit:]]+)\z/) do |m|
+       chain.where id: m[1]
      end
    end
  end
@@ -212,9 +262,9 @@ If you want to re-use a parser, you could implement it as a lambda:
 
  ```ruby
  module Parsers
-   ID_PARSER = lambda do |token|
-     token.match h/\Aid:[[:space:]]*([[:digit:]]+)\z/ do |m|
-       [:where, { id: m[1] }]
+   ID_PARSER = lambda do |token, chain|
+     token.match(/\Aid:[[:space:]]*([[:digit:]]+)\z/) do |m|
+       chain.where id: m[1]
      end
    end
  end
@@ -228,38 +278,76 @@ class MyOtherSearch < SearchLingo::AbstractSearch
  end
  ```
 
- Finally, for the most complicated cases, you could implement parsers as
- classes:
+ For more complex cases, you may choose to implement a parser as its own class.
 
  ```ruby
  module Parsers
-   class IdParser
-     def initialize(table, modifier = nil)
-       @table = table
-       @prefix = /#{modifier}:\s*/ if modifier
+   class DateParser
+     US_DATE = %r{(?<m>\d{1,2})/(?<d>\d{1,2})/(?<y>\d{4})}
+
+     attr_reader :column
+
+     def initialize(column)
+       @column = column
      end
 
-     def call(token)
-       token.match /\A#{@prefix}([[:digit:]]+)\z/ do |m|
-         [:where, { @table => { id: m[1] } }]
+     def call(token, chain)
+       catch :halt do
+         parse_simple_date token, chain
+         parse_date_range token, chain
+         parse_open_date_range token, chain
        end
      end
-   end
- end
 
- class EventSearch < SearchLingo::AbstractSearch
-   # matches "42" and adds events.id=42 as a condition
-   parser Parsers::IdParser.new Event.table_name
+     private
+
+     # Parses simple dates like "10/2/2018"
+     def parse_simple_date(token, chain)
+       token.match(/\A#{US_DATE}\z/) do |m|
+         date = Date.parse '%04d-%02d-%02d' % m.values_at(:y, :m, :d)
+         throw :halt, chain.where(column.eq(date))
+       end
+     rescue ArgumentError
+       # Raised by Date.parse for invalid dates
+     end
+
+     # Parses date ranges like "10/1/2018-10/31/2018"
+     def parse_date_range(token, chain)
+       token.match(/\A#{US_DATE}-#{US_DATE}\z/) do |m|
+         min = Date.parse '%04d-%02d-%02d' % m.values_at(3, 1, 2)
+         max = Date.parse '%04d-%02d-%02d' % m.values_at(6, 4, 5)
+         throw :halt, chain.where(column.in(min..max))
+       end
+     rescue ArgumentError
+       # Raised by Date.parse for invalid dates
+     end
 
-   # matches "category: 42" and adds categories.id as a condition
-   parser Parsers::IdParser.new Category.table_name, 'category'
+     # Parses open-ended date ranges like "10/1/2018-" or "-10/31/2018"
+     def parse_open_date_range(token, chain)
+       token.match(/\A(?<min>#{US_DATE})-|-(?<max>#{US_DATE})\z/) do |m|
+         if m[:min]
+           date = Date.parse '%04d-%02d-%02d' % m.values_at(:y, :m, :d)
+           throw :halt, chain.where(column.gteq(date))
+         else
+           date = Date.parse '%04d-%02d-%02d' % m.values_at(:y, :m, :d)
+           throw :halt, chain.where(column.lteq(date))
+         end
+       end
+     rescue ArgumentError
+       # Raised by Date.parse for invalid dates
+     end
+   end
  end
 
- class CategorySearch < SearchLingo::AbstractSearch
-   parser Parsers::IdParser.new :categories
+ class EventSearch < SearchLingo::AbstractSearch
+   parser Parsers::DateParser.new(Event.arel_table[:date])
  end
  ```
 
+ (Date parsing was a convenient example of a parser complex enough to warrant
+ its own class, but a date parser is included with the gem. See "Date Parsers"
+ below for more information.)
+
  ### Date Parsers
 
  One of the non-trivial parsing tasks I found myself constantly reimplementing
data/examples/complex.rb CHANGED
@@ -12,9 +12,9 @@ module Parsers # :nodoc:
        @table = table
      end
 
-     def call(token)
+     def call(token, chain)
        token.match /\Aid:\s*([[:digit:]]+)\z/ do |m|
-         [:where, { @table => { id: m[1] } }]
+         chain.where @table => { id: m[1] }
        end
      end
    end
@@ -24,8 +24,8 @@ class JobSearch < SearchLingo::AbstractSearch # :nodoc:
    parser SearchLingo::Parsers::DateParser.new Job.arel_table[:date]
    parser Parsers::IdParser.new Job.table_name
 
-   def default_parse(token)
-     [:where, Job.arel_table[:name].lower.like("%#{token}%")]
+   def default_parse(token, chain)
+     chain.where Job.arel_table[:name].matches "%#{token}%"
    end
  end
 
@@ -34,26 +34,28 @@ class ReceiptSearch < SearchLingo::AbstractSearch # :nodoc:
    parser SearchLingo::Parsers::DateParser.new Receipt.arel_table[:post_date],
                                                modifier: 'posted'
 
-   parser do |token|
-     token.match /\Aamount: (\d+(?:\.\d+)?)\z/ do |m|
-       [:where, { receipts: { amount: m[1] } }]
+   parser do |token, chain|
+     token.match(/\Aamount: (\d+(?:\.\d+)?)\z/) do |m|
+       chain.where receipts: { amount: m[1] }
      end
    end
 
-   def default_parse(token)
-     [:where, Receipt.arel_table[:check_no].like(token)]
+   def default_parse(token, chain)
+     chain.where Receipt.arel_table[:check_no].matches token
    end
  end
 
  search = JobSearch.new('6/4/15-6/5/15 id: 42 "foo bar"')
  search.results
- # => Job.where(Job.arel_table[:date].in(Date.new(2015,6,4)..Date.new(2015,6,5)))
- #    .where('jobs' => { id: '42' })
- #    .where(Job.arel_table[:name].lower.like('%foo bar%'))
+ # => Job
+ #    .where(Job.arel_table[:date].in(Date.new(2015,6,4)..Date.new(2015,6,5)))
+ #    .where('jobs' => { id: '42' })
+ #    .where(Job.arel_table[:name].matches('%foo bar%'))
 
  search = ReceiptSearch.new('-6/4/15 posted: 6/5/15- amount: 1000 123')
  search.results
- # => Receipt.where(Receipt.arel_table[:check_date].lteq(Date.new(2015,6,4)))
- #    .where(Receipt.arel_table[:post_date].gteq(Date.new(2015,6,5)))
- #    .where(receipts: { amount: '1000' })
- #    .where(Receipt.arel_table[:check_no].matches('123'))
+ # => Receipt
+ #    .where(Receipt.arel_table[:check_date].lteq(Date.new(2015, 6, 4)))
+ #    .where(Receipt.arel_table[:post_date].gteq(Date.new(2015, 6, 5)))
+ #    .where(receipts: { amount: '1000' })
+ #    .where(Receipt.arel_table[:check_no].matches('123'))
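In an application, search classes like these are typically driven by user input. A sketch of a hypothetical Rails controller action (the `params[:q]` key, the `current_organization` helper, and the pre-narrowed scope are assumptions for illustration):

```ruby
class ReceiptsController < ApplicationController
  def index
    # Compose the user's query on top of an already-narrowed scope,
    # then execute it with #results.
    scope = Receipt.where(organization_id: current_organization.id)
    @receipts = ReceiptSearch.new(params[:q].to_s, scope).results
  end
end
```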
@@ -29,9 +29,10 @@ class Task < Sequel::Model # :nodoc:
  end
 
  class CategoryParser # :nodoc:
-   def call(token)
+   def call(token, chain)
      if token.modifier == 'cat'
-       [:where, { Sequel.qualify('category', 'name') => token.term }]
+       chain.eager_graph(:category)
+            .where Sequel.qualify('category', 'name') => token.term
      end
    end
  end
@@ -43,9 +44,10 @@ class TaskSearch < SearchLingo::AbstractSearch # :nodoc:
    #
    # <2 => Categories with priority < 2
    # >5 => Categories with priority > 5
-   parser do |token|
+   parser do |token, chain|
      token.match /\A([<>])([[:digit:]]+)\z/ do |m|
-       [:where, Sequel.expr { priority.send m[1], m[2] }]
+       chain.eager_graph(:category)
+            .where Sequel.expr { priority.send m[1], m[2] }
      end
    end
 
@@ -53,12 +55,14 @@ class TaskSearch < SearchLingo::AbstractSearch # :nodoc:
    #
    # 7/4/1776 => Tasks with due_date == Date.new(1776, 7, 4)
    # 7/4/17 => Tasks with due_date == Date.new(2017, 7, 4)
-   parser do |token|
-     token.match %r{\A(?<m>\d{1,2})/(?<d>\d{1,2})/(?<y>\d{2}\d{2}?)\z} do |m|
-       begin
-         [:where, { due_date: Date.parse("#{m[:y]}/#{m[:m]}/#{m[:d]}") }]
-       rescue ArgumentError
-       end
+   parser do |token, chain|
+     token.match %r{\A(\d{1,2})/(\d{1,2})/(\d{2}\d{2}?)\z} do |m|
+       date = begin
+                Date.parse '%d/%d/%d' % m.values_at(3, 1, 2)
+              rescue ArgumentError
+                return nil
+              end
+       chain.where due_date: date
      end
    end
 
@@ -66,11 +70,7 @@ class TaskSearch < SearchLingo::AbstractSearch # :nodoc:
    #
    # pay bills => Match tasks with names like "pay bills", "pay bills by today"
    # brush teeth => Match tasks with names like "brush teeth", "brush teeth and floss"
-   def default_parse(token)
-     [:where, Sequel.lit('tasks.name LIKE ?', "%#{token.term}%")]
-   end
-
-   def scope
-     @scope.eager_graph(:category)
+   def default_parse(token, chain)
+     chain.where Sequel.lit 'tasks.name LIKE ?', "%#{token.term}%"
    end
  end
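A sketch of exercising the Sequel-backed search above, assuming the example file registers `CategoryParser` alongside the block parsers and that the search is given a `Sequel::Dataset` (here `Task.dataset`) as its scope:

```ruby
search = TaskSearch.new('cat: chores 7/4/2018 laundry', Task.dataset)
search.results
# A Sequel::Dataset with the category graph/filter, the due_date filter,
# and the LIKE condition from #default_parse applied in token order.
```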
@@ -2,7 +2,7 @@ require 'search_lingo/tokenizer'
 
  module SearchLingo
    class AbstractSearch
-     attr_reader :query
+     attr_reader :query, :scope
 
      ##
      # Instantiates a new search object. +query+ is the string that is to be
@@ -62,42 +62,27 @@ module SearchLingo
      end
 
      ##
-     # Constructs and performs the query.
-     def load_results
-       conditions.inject(scope) do |query, condition|
-         query.public_send(*condition)
-       end
-     end
-
-     ##
-     # Returns an +Array+ of compiled query parameters.
+     # Load search results by composing query string tokens into a query chain.
      #
      # @query is broken down into tokens, and each token is passed through the
      # list of defined parsers. If a parser is successful, +:match+ is thrown,
-     # the compiled condition is saved, and processing moves on to the next
-     # token. If none of the parsers succeeds and the token is compound, that
-     # is, it has both a modifier and a term, the token is simplified, and
-     # reprocessed through the list of parsers. As during the first pass, if a
-     # parser succeeds, +:match+ is thrown, the compiled condition for the now
-     # simplified token is saved, and processing moves on to the next token (the
-     # remains of the original compound token). If none of the parsers succeeds
-     # during the second pass, the now simplified token is finally sent to
-     # +#default_parse+, and whatever it returns will be saved as the compiled
-     # condition.
-     def conditions
-       tokenizer.inject([]) do |conditions, token|
-         conditions << catch(:match) do
-           # 1. Try each parser with the token until :match is thrown.
-           parse token
+     # processing moves on to the next token. If none of the parsers succeed and
+     # the token is compound, the token is simplified and reprocessed as before.
+     # If still no parser succeeds, fall back on +#default_parse+.
+     def load_results
+       tokenizer.reduce(scope) do |chain, token|
+         catch(:match) do
+           # 1. Try each parser with token until :match is thrown.
+           parse token, chain
 
-           # 2. If :match not thrown and token is compound, simplify and try again.
+           # 2. If :match not thrown and token is compound, simplify and retry.
            if token.compound?
              token = tokenizer.simplify
-             parse token
+             parse token, chain
            end
 
-           # 3. If :match still not thrown, fallback on default parser.
-           default_parse token
+           # 3. If :match still not thrown, fall back on default parser.
+           default_parse token, chain
          end
        end
@@ -110,37 +95,29 @@ module SearchLingo
 
      ##
      # Passes +token+ to each parser in turn. If a parser succeeds, throws
-     # +:match+ with the compiled result.
+     # +:match+ with the result.
      #
-     # A parser succeeds if +call+ returns a truthy value. The return value of a
-     # successful parser will be splatted and sent to @scope using
-     # +public_send+.
-     def parse(token)
+     # A parser succeeds if +call+ returns a truthy value. A successful parser
+     # will typically send something to +chain+ and return the result. In this
+     # way, the tokens of the search are reduced into a composed query.
+     def parse(token, chain)
        parsers.each do |parser|
-         result = parser.call token
+         result = parser.call token, chain
          throw :match, result if result
        end
        nil
      end
 
      ##
-     # Raises +NotImplementedError+. Classes which inherit from
-     # SearchLingo::AbstractSearch must provide their own implementation, and it
-     # should *always* succeed.
-     def default_parse(token)
+     # The default way to handle a token which could not be parsed by any of the
+     # other parsers.
+     #
+     # This is a skeletal implementation that raises +NotImplementedError+.
+     # Child classes should provide their own implementation. At a minimum, that
+     # implementation should return +chain+. (Doing so would ignore +token+.)
+     def default_parse(token, chain)
        raise NotImplementedError,
              "#default_parse must be implemented by #{self.class}"
      end
-
-     ##
-     # Returns @scope.
-     #
-     # You may override this method in your search class if you want to ensure
-     # additional messages are sent to search scope before executing the query.
-     # For example, if @scope is an +ActiveRecord+ model, you might want to join
-     # additional tables.
-     def scope
-       @scope
-     end
    end
  end
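A minimal subclass that satisfies this contract might look like the following sketch; it simply ignores tokens that no parser understood by returning the chain untouched:

```ruby
class MinimalSearch < SearchLingo::AbstractSearch
  def default_parse(_token, chain)
    # Drop unrecognized tokens and keep the chain as-is.
    chain
  end
end
```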
@@ -29,11 +29,13 @@ module SearchLingo
        # Examples of single dates are 7/14, 7/14/17, and 7/14/2017.
        # Examples of closed date ranges are 1/1-6/30 and 7/1/16-6/30/18.
        # Examples of open date ranges are -6/30 and 7/1/17-.
-       def call(token)
-         parse_single_date(token) ||
-           parse_date_range(token) ||
-           parse_lte_date(token) ||
-           parse_gte_date(token)
+       def call(token, chain)
+         catch :stop do
+           parse_single_date token, chain
+           parse_date_range token, chain
+           parse_lte_date token, chain
+           parse_gte_date token, chain
+         end
        end
 
        def inspect # :nodoc:
@@ -43,32 +45,32 @@ module SearchLingo
 
        private
 
-       def parse_single_date(token)
-         token.match /\A#{prefix}(?<date>#{US_DATE})\z/ do |m|
+       def parse_single_date(token, chain)
+         token.match(/\A#{prefix}(?<date>#{US_DATE})\z/) do |m|
            date = parse(m[:date]) or return nil
-           [:where, column.eq(date)]
+           throw :stop, chain.where(column.eq(date))
          end
        end
 
-       def parse_date_range(token)
-         token.match /\A#{prefix}(?<min>#{US_DATE})-(?<max>#{US_DATE})\z/ do |m|
+       def parse_date_range(token, chain)
+         token.match(/\A#{prefix}(?<min>#{US_DATE})-(?<max>#{US_DATE})\z/) do |m|
            min = parse(m[:min]) or return nil
            max = parse(m[:max], relative_to: min.next_year) or return nil
-           [:where, column.in(min..max)]
+           throw :stop, chain.where(column.in(min..max))
          end
        end
 
-       def parse_lte_date(token)
-         token.match /\A#{prefix}-(?<date>#{US_DATE})\z/ do |m|
+       def parse_lte_date(token, chain)
+         token.match(/\A#{prefix}-(?<date>#{US_DATE})\z/) do |m|
            date = parse(m[:date]) or return nil
-           [:where, column.lteq(date)]
+           throw :stop, chain.where(column.lteq(date))
          end
        end
 
-       def parse_gte_date(token)
-         token.match /\A#{prefix}(?<date>#{US_DATE})-\z/ do |m|
+       def parse_gte_date(token, chain)
+         token.match(/\A#{prefix}(?<date>#{US_DATE})-\z/) do |m|
            date = parse(m[:date]) or return nil
-           [:where, column.gteq(date)]
+           throw :stop, chain.where(column.gteq(date))
          end
        end
      end
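Registering the bundled date parser follows the same pattern used in `examples/complex.rb`; a sketch, assuming an ActiveRecord `Task` model with a `due_date` column:

```ruby
class TaskSearch < SearchLingo::AbstractSearch
  # Matches tokens like "7/4/2018", "7/1/2018-7/31/2018", "-7/31/2018",
  # and "7/1/2018-" and compares them against tasks.due_date.
  parser SearchLingo::Parsers::DateParser.new Task.arel_table[:due_date]

  def default_parse(_token, chain)
    chain
  end
end

TaskSearch.new('7/1/2018-7/31/2018', Task.all).results
# => Task.all.where(Task.arel_table[:due_date].in(
#      Date.new(2018, 7, 1)..Date.new(2018, 7, 31)))
```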
@@ -24,7 +24,7 @@ module SearchLingo
        # Available as both a class method and an instance method.
        def parse(term, relative_to: Date.today)
          term.match /\A#{US_DATE}\z/ do |m|
-           return Date.parse "#{m[:y]}/#{m[:m]}/#{m[:d]}" if m[:y]
+           return Date.parse '%d/%d/%d' % m.values_at(:y, :m, :d) if m[:y]
 
            ref = relative_to
            day = Integer(m[:d])
@@ -1,3 +1,3 @@
  module SearchLingo
-   VERSION = '1.0.3'
+   VERSION = '2.0.0.pre1'
  end
data/search_lingo.gemspec CHANGED
@@ -26,6 +26,8 @@ Gem::Specification.new do |spec|
    spec.add_development_dependency "bundler", "~> 1.9"
    spec.add_development_dependency "rake", "~> 10.0"
    spec.add_development_dependency 'minitest'
+   spec.add_development_dependency 'minitest-focus'
+   spec.add_development_dependency 'mocha'
    spec.add_development_dependency 'pry'
    spec.add_development_dependency 'sequel', '~> 4.48'
    spec.add_development_dependency 'sqlite3'
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: search_lingo
  version: !ruby/object:Gem::Version
-   version: 1.0.3
+   version: 2.0.0.pre1
  platform: ruby
  authors:
  - John Parker
@@ -52,6 +52,34 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: '0'
+ - !ruby/object:Gem::Dependency
+   name: minitest-focus
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: mocha
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: pry
    requirement: !ruby/object:Gem::Requirement
@@ -141,9 +169,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
        version: '2.1'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ">="
+   - - ">"
      - !ruby/object:Gem::Version
-       version: '0'
+       version: 1.3.1
  requirements: []
  rubyforge_project:
  rubygems_version: 2.7.7