ultrasphinx 1.5.3 → 1.6

@@ -9,66 +9,79 @@ module Ultrasphinx
 
   def build_request_with_options opts
 
-    request = Sphinx::Client.new
-
-    request.SetServer(
-      Ultrasphinx::CLIENT_SETTINGS['server_host'],
-      Ultrasphinx::CLIENT_SETTINGS['server_port']
-    )
-
-    # Force extended query mode
-    request.SetMatchMode(Sphinx::Client::SPH_MATCH_EXTENDED)
-
-    offset, limit = opts['per_page'] * (opts['page'] - 1), opts['per_page']
-
-    request.SetLimits offset, limit, [offset + limit, MAX_MATCHES].min
+    request = Riddle::Client.new
+    request.instance_eval do
+      @server = Ultrasphinx::CLIENT_SETTINGS['server_host']
+      @port = Ultrasphinx::CLIENT_SETTINGS['server_port']
+      @match_mode = :extended # Force extended query mode
+      @offset = opts['per_page'] * (opts['page'] - 1)
+      @limit = opts['per_page']
+      @max_matches = [@offset + @limit, MAX_MATCHES].min
+    end
+
+    # Sorting
+    sort_by = opts['sort_by']
+    unless sort_by.blank?
+      if opts['sort_mode'].to_s == 'relevance'
+        # If you're sorting by a field you don't want 'relevance' order
+        raise UsageError, "Sort mode 'relevance' is not valid with a sort_by field"
+      end
+      request.sort_by = sort_by.to_s
+    end
 
-    if SPHINX_CLIENT_PARAMS['sort_mode'][opts['sort_mode']]
-      request.SetSortMode SPHINX_CLIENT_PARAMS['sort_mode'][opts['sort_mode']], opts['sort_by'].to_s
+    if sort_mode = SPHINX_CLIENT_PARAMS['sort_mode'][opts['sort_mode']]
+      request.sort_mode = sort_mode
     else
       raise UsageError, "Sort mode #{opts['sort_mode'].inspect} is invalid"
-    end
-
-    if weights = opts['weights']
-      # Order the weights hash according to the field order for Sphinx, and set the missing fields to 1.0
-      request.SetWeights(Fields.instance.types.select{|n,t| t == 'text'}.map(&:first).sort.inject([]) do |array, field|
+    end
+
+    # Weighting
+    weights = opts['weights']
+    if weights.any?
+      # Order according to the field order for Sphinx, and set the missing fields to 1.0
+      request.weights = (Fields.instance.types.select{|n,t| t == 'text'}.map(&:first).sort.inject([]) do |array, field|
        array << (weights[field] || 1.0)
      end)
    end
+
+    # Class names
+    unless Array(opts['class_names']).empty?
+      request.filters << Riddle::Client::Filter.new(
+        'class_id',
+        (opts['class_names'].map do |model|
+          MODELS_TO_IDS[model.to_s] or
+          MODELS_TO_IDS[model.to_s.constantize.base_class.to_s] or
+          raise UsageError, "Invalid class name #{model.inspect}"
+        end),
+        false)
+    end
 
-    unless opts['class_names'].compact.empty?
-      request.SetFilter('class_id', (opts['class_names'].map do |model|
-        MODELS_TO_IDS[model.to_s] or
-        MODELS_TO_IDS[model.to_s.constantize.base_class.to_s] or
-        raise UsageError, "Invalid class name #{model.inspect}"
-      end))
-    end
-
-    # Extract ranged raw filters
-    # Some of this mangling might not be necessary
-    opts['filters'].each do |field, value|
+    # Extract raw filters
+    # XXX We should coerce based on the Field values, not on the class
+    Array(opts['filters']).each do |field, value|
      field = field.to_s
      unless Fields.instance.types[field]
-        raise Sphinx::SphinxArgumentError, "field #{field.inspect} is invalid"
+        raise UsageError, "field #{field.inspect} is invalid"
      end
      begin
        case value
-        when Fixnum, Float, BigDecimal, NilClass, Array
-          request.SetFilter field, Array(value)
+        when Integer, Float, BigDecimal, NilClass, Array
+          # Just bomb the filter in there
+          request.filters << Riddle::Client::Filter.new(field, Array(value), false)
        when Range
-          min, max = [value.begin, value.end].map do |x|
-            x._to_numeric
-          end
+          # Make sure ranges point in the right direction
+          min, max = [value.begin, value.end].map {|x| x._to_numeric }
          raise NoMethodError unless min <=> max and max <=> min
          min, max = max, min if min > max
-          request.SetFilterRange field, min, max
+          request.filters << Riddle::Client::Filter.new(field, min..max, false)
        when String
+          # XXX Hack to move text filters into the query
          opts['parsed_query'] << " @#{field} #{value}"
        else
          raise NoMethodError
        end
      rescue NoMethodError => e
-        raise Sphinx::SphinxArgumentError, "filter value #{value.inspect} for field #{field.inspect} is invalid"
+        raise UsageError, "filter value #{value.inspect} for field #{field.inspect} is invalid"
      end
    end
 
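The rewritten builder drops the Sphinx::Client call style (SetServer, SetMatchMode, SetLimits, SetFilter) in favor of Riddle::Client state plus Riddle::Client::Filter objects pushed onto request.filters. A minimal standalone sketch of the Riddle usage the new code relies on; the host, port, limits, and filter values are illustrative, and the instance_eval style simply mirrors the code above:

    require 'riddle'

    client = Riddle::Client.new
    client.instance_eval do
      @server      = 'localhost'   # hypothetical searchd host
      @port        = 3312          # hypothetical searchd port
      @match_mode  = :extended
      @offset      = 0
      @limit       = 20
      @max_matches = 1000
    end

    # Filters are plain objects now; the third argument is the exclude flag.
    client.filters << Riddle::Client::Filter.new('class_id', [0, 1], false)
    client.filters << Riddle::Client::Filter.new('created_at', 0..Time.now.to_i, false)

    # The unified index name is passed along with the query string.
    response = client.query('cats @name maine', 'complete')
    response[:matches].each { |match| puts match[:doc] }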
@@ -77,7 +90,7 @@ module Ultrasphinx
 
   def get_subtotals(original_request, query)
     request = original_request._deep_dup
-    request.instance_eval { @filters.delete_if {|f| f['attr'] == 'class_id'} }
+    request.instance_eval { @filters.delete_if {|filter| filter.attribute == 'class_id'} }
 
     facets = get_facets(request, query, 'class_id')
 
@@ -91,26 +104,37 @@ module Ultrasphinx
     request, facet = original_request._deep_dup, original_facet
     facet += "_facet" if Fields.instance.types[original_facet] == 'text'
 
-    raise UsageError, "Field #{original_facet} does not exist or was not configured for faceting" unless Fields.instance.types[facet]
-
+    unless Fields.instance.types[facet]
+      if facet == original_facet
+        raise UsageError, "Field #{original_facet} does not exist"
+      else
+        raise UsageError, "Field #{original_facet} is a text field, but was not configured for text faceting"
+      end
+    end
+
     # Set the facet query parameter and modify per-page setting so we snag all the facets
-    request.SetGroupBy(facet, Sphinx::Client::SPH_GROUPBY_ATTR, '@count desc')
-    limit = self.class.client_options['max_facets']
-    request.SetLimits 0, limit, [limit, MAX_MATCHES].min
+    request.instance_eval do
+      @group_by = facet
+      @group_function = :attr
+      @group_clauses = '@count desc'
+      @offset = 0
+      @limit = Ultrasphinx::Search.client_options['max_facets']
+      @max_matches = [@limit, MAX_MATCHES].min
+    end
 
     # Run the query
     begin
-      matches = request.Query(query)['matches']
-    rescue Sphinx::SphinxInternalError
-      raise ConfigurationError, "Index is out of date. Run 'rake ultrasphinx:index'"
+      matches = request.query(query, UNIFIED_INDEX_NAME)[:matches]
+    rescue DaemonError
+      raise ConfigurationError, "Index seems out of date. Run 'rake ultrasphinx:index'"
    end
 
    # Map the facets back to something sane
    facets = {}
    matches.each do |match|
-      match = match.last['attrs'] # :(
-      raise ResponseError if facets[match['@groupby']]
-      facets[match['@groupby']] = match['@count']
+      attributes = match[:attributes]
+      raise DaemonError if facets[attributes['@groupby']]
+      facets[attributes['@groupby']] = attributes['@count']
    end
 
    # Invert hash's, if we have them
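For reference, a grouped Riddle query returns its per-group data in the Sphinx magic attributes '@groupby' and '@count', which is all the loop above unpacks. A hedged sketch of the same pattern outside Ultrasphinx (the facet attribute and query string are illustrative):

    client = Riddle::Client.new
    client.instance_eval do
      @group_by       = 'company_name_facet'   # CRC32 attribute from the generated conf
      @group_function = :attr
      @group_clauses  = '@count desc'
    end

    facets = {}
    client.query('superior products', 'complete')[:matches].each do |match|
      attributes = match[:attributes]
      facets[attributes['@groupby']] = attributes['@count']
    end
    # facets now maps CRC32 hashes to counts; the "Invert hash's" step that
    # follows in get_facets turns those hashes back into the original strings.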
@@ -155,69 +179,76 @@ module Ultrasphinx
     end
     FACET_CACHE[facet]
   end
-
-  def reify_results(sphinx_ids)
-
-    # Order by position and then toss the rest of the data
-    sphinx_ids = sphinx_ids.sort_by do |key, value|
-      value['index'] or raise ConfigurationError, "Your Sphinx client is not properly patched."
-    end.map(&:first)
-
-    # Inverse-modulus map the sphinx ids to the table-specific ids
-    ids = Hash.new([])
-    sphinx_ids.each do |id|
-      ids[MODELS_TO_IDS.invert[id % MODELS_TO_IDS.size]] += [id / MODELS_TO_IDS.size] # yay math
+
+  # Inverse-modulus map the Sphinx ids to the table-specific ids
+  def convert_sphinx_ids(sphinx_ids)
+    sphinx_ids.sort_by do |item|
+      item[:index]
+    end.map do |item|
+      class_name = MODELS_TO_IDS.invert[item[:doc] % MODELS_TO_IDS.size]
+      raise DaemonError, "Impossible Sphinx document id #{item[:doc]} in query result" unless class_name
+      [class_name, item[:doc] / MODELS_TO_IDS.size]
    end
-    raise Sphinx::SphinxResponseError, "impossible document id in query result" unless ids.values.flatten.size == sphinx_ids.size
-
-    # Fetch them for real
+  end
+
+  # Fetch them for real
+  def reify_results(ids)
    results = []
-    ids.each do |model, id_set|
-      klass = model.constantize
-
-      finder = self.class.client_options['finder_methods'].detect do |method_name|
+
+    ids.each do |klass_name, id|
+
+      # What class and class method are we using to get the record?
+      klass = klass_name.constantize
+      finder = Ultrasphinx::Search.client_options['finder_methods'].detect do |method_name|
        klass.respond_to? method_name
      end
 
-      # Ultrasphinx.say "using #{klass.name}.#{finder} as finder method"
-
-      begin
-        # XXX Does not use Memcached's multiget
-        results += case instances = id_set.map { |id| klass.send(finder, id) }
-          when Hash
-            instances.values
-          when Array
-            instances
-          else
-            Array(instances)
+      # Load it
+      record = begin
+        # XXX Does not use Memcached's multiget, or MySQL's, for that matter
+        klass.send(finder, id)
+      rescue ActiveRecord::RecordNotFound => e
+        if Ultrasphinx::Search.client_options['ignore_missing_records']
+          # XXX Should maybe adjust the total_found count, etc
+        else
+          raise(e)
        end
-      rescue ActiveRecord::ActiveRecordError => e
-        raise Sphinx::SphinxResponseError, e.inspect
-      end
-    end
-
-    # Put them back in order
-    results.sort_by do |r|
-      raise Sphinx::SphinxResponseError, "Bogus ActiveRecord id for #{r.class}:#{r.id}" unless r.id
-
-      model_index = MODELS_TO_IDS[r.class.base_class.name]
-      raise UsageError, "#{r.class.base_class} is not an indexed class. Maybe you indexed an STI child class instead of the base class?" unless model_index
-
-      index = (sphinx_ids.index(sphinx_id = r.id * MODELS_TO_IDS.size + model_index))
-      raise Sphinx::SphinxResponseError, "Bogus reverse id for #{r.class}:#{r.id} (Sphinx:#{sphinx_id})" unless index
+      end
 
-      index / sphinx_ids.size.to_f
+      # Add it to the list. Cache_fu does funny things with returned record organization.
+      results += record.is_a?(Hash) ? record.values : Array(record)
    end
-
-    # Add an accessor for absolute search rank for each record
-    results.each_with_index do |r, index|
+
+    # Add an accessor for absolute search rank for each record (does anyone use this?)
+    results.each_with_index do |result, index|
      i = per_page * (current_page - 1) + index
-      r._metaclass.send('define_method', 'result_index') { i }
+      result._metaclass.send('define_method', 'result_index') { i }
    end
 
    results
  end
-
+
+  def perform_action_with_retries
+    tries = 0
+    begin
+      yield
+    rescue NoMethodError,
+        Riddle::VersionError,
+        Riddle::ResponseError,
+        Errno::ECONNREFUSED,
+        Errno::ECONNRESET,
+        Errno::EPIPE => e
+      tries += 1
+      if tries <= Ultrasphinx::Search.client_options['max_retries']
+        say "restarting query (#{tries} attempts already) (#{e})"
+        sleep(Ultrasphinx::Search.client_options['retry_sleep_time'])
+        retry
+      else
+        say "query failed"
+        raise DaemonError, e.to_s
+      end
+    end
+  end
 
  def strip_bogus_characters(s)
    # Used to remove some garbage before highlighting
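The convert_sphinx_ids/reify_results split leans on the document id scheme visible in the generated configs below, where each row is indexed as record_id * MODELS_TO_IDS.size + class_id (for example `(users.id * 4 + 3) AS id`). A worked example of the round trip:

    # Class ids as they appear in the generated default.conf below.
    MODELS_TO_IDS = {'Geo::State' => 0, 'Seller' => 1, 'Geo::Address' => 2, 'User' => 3}

    # Indexing: users.id 17 becomes Sphinx document id 17 * 4 + 3.
    doc_id = 17 * MODELS_TO_IDS.size + MODELS_TO_IDS['User']   # => 71

    # Searching: modulus recovers the class, integer division the record id.
    MODELS_TO_IDS.invert[doc_id % MODELS_TO_IDS.size]          # => "User"
    doc_id / MODELS_TO_IDS.size                                # => 17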
@@ -225,7 +256,7 @@
   end
 
   def strip_query_commands(s)
-    # XXX Hack for query commands, since sphinx doesn't intelligently parse the query in excerpt mode
+    # XXX Hack for query commands, since Sphinx doesn't intelligently parse the query in excerpt mode
     # Also removes apostrophes in the middle of words so that they don't get split in two.
     s.gsub(/(^|\s)(AND|OR|NOT|\@\w+)(\s|$)/i, "").gsub(/(\w)\'(\w)/, '\1\2')
   end
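Only the comment changed here, so the behavior is the same as in 1.5.3; an illustrative call (the input string is made up):

    strip_query_commands("@company_name don't panic")
    # Field commands such as "@company_name" and the operators AND/OR/NOT are
    # stripped before excerpting, and mid-word apostrophes are removed.
    # => "dont panic"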
@@ -34,16 +34,8 @@ module Ultrasphinx
 
   MAX_WORDS = 2**16 # maximum number of stopwords built
 
-  EMPTY_SEARCHABLE = "__empty_searchable__"
-
   UNIFIED_INDEX_NAME = "complete"
 
-  SPHINX_VERSION = if `which indexer` =~ /\/indexer\n/m
-    `indexer`.split("\n").first[7..-1]
-  else
-    "unknown"
-  end
-
   CONFIG_MAP = {
     # These must be symbols for key mapping against Rails itself
     :username => 'sql_user',
@@ -1,5 +1,6 @@
 class Geo::State < ActiveRecord::Base
-  has_many :"geo/addresses"
+  has_many :addresses, :class_name => "Geo::Address"
 
   is_indexed :concatenate => [{:class_name => 'Geo::Address', :field => 'name', :as => 'address_name'}]
+  #:fields => [{:field => 'abbreviation', :as => 'company_name'}],
 end
@@ -3,7 +3,8 @@ class User < ActiveRecord::Base
   has_one :address, :class_name => "Geo::Address"
 
   is_indexed :fields => ['login', 'email', 'deleted'],
-    :include => [{:class_name => 'Seller', :field => 'company_name', :as => 'company'}],
+    :include => [{:class_name => 'Seller', :field => 'company_name', :as => 'company'},
+      {:class_name => 'Seller', :field => 'sellers_two.company_name', :as => 'company_two', 'association_sql' => 'LEFT OUTER JOIN sellers AS sellers_two ON users.id = sellers_two.user_id', 'function_sql' => "REPLACE(?, '6', ' replacement ')"}],
     :conditions => 'deleted = 0'
 
 end
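The new second :include entry exercises 'association_sql' and 'function_sql' together: the JOIN is emitted verbatim, and the ? in 'function_sql' is replaced with the selected column, which is how the REPLACE(sellers_two.company_name, ...) AS company_two column appears in the generated configs below. A hedged sketch of the same options on a hypothetical model (the table, alias, and function are invented):

    class Employer < ActiveRecord::Base
      is_indexed :fields => ['name'],
        :include => [{:class_name => 'Seller',
                      :field => 'sellers_recent.company_name',
                      :as => 'recent_company',
                      'association_sql' => 'LEFT OUTER JOIN sellers AS sellers_recent ON employers.id = sellers_recent.employer_id',
                      'function_sql' => 'UPPER(?)'}]
    end
    # Expected generated column, by analogy with company_two below:
    #   UPPER(sellers_recent.company_name) AS recent_company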
@@ -6,6 +6,4 @@ Rails::Initializer.run do |config|
   config.load_paths << "#{RAILS_ROOT}/app/models/person" # moduleless model path
 end
 
-Ultrasphinx::Search.client_options['with_subtotals'] = true
-
 # Dependencies.log_activity = true
@@ -1,7 +1,7 @@
 
-# Auto-generated at Mon Oct 08 17:02:18 -0400 2007.
+# Auto-generated at Wed Nov 14 03:32:12 -0500 2007.
 # Hand modifications will be overwritten.
-# /Users/eweaver/Desktop/projects/chow/vendor/plugins/ultrasphinx/test/integration/app/config/ultrasphinx/default.base
+# /Users/eweaver/Desktop/projects/fauna/ultrasphinx/trunk/test/integration/app/config/ultrasphinx/default.base
 indexer {
   mem_limit = 256M
 }
@@ -34,7 +34,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM states
-sql_query = SELECT (states.id * 4 + 0) AS id, CAST(GROUP_CONCAT(addresses.name SEPARATOR ' ') AS CHAR) AS address_name, 0 AS capitalization, 'Geo::State' AS class, 0 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '__empty_searchable__' AS empty_searchable, '' AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM states LEFT OUTER JOIN addresses ON states.id = addresses.state_id WHERE states.id >= $start AND states.id <= $end GROUP BY id
+sql_query = SELECT (states.id * 4 + 0) AS id, CAST(GROUP_CONCAT(DISTINCT addresses.name SEPARATOR ' ') AS CHAR) AS address_name, 0 AS capitalization, 'Geo::State' AS class, 0 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, '' AS company_two, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '' AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM states LEFT OUTER JOIN addresses AS addresses ON states.id = addresses.state_id WHERE states.id >= $start AND states.id <= $end GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -65,7 +65,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM sellers
-sql_query = SELECT (sellers.id * 4 + 1) AS id, '' AS address_name, sellers.capitalization AS capitalization, 'Seller' AS class, 1 AS class_id, '' AS company, sellers.company_name AS company_name, CRC32(sellers.company_name) AS company_name_facet, '' AS content, UNIX_TIMESTAMP(sellers.created_at) AS created_at, 0 AS deleted, '' AS email, '__empty_searchable__' AS empty_searchable, '' AS login, sellers.mission_statement AS mission_statement, '' AS name, '' AS state, sellers.user_id AS user_id FROM sellers WHERE sellers.id >= $start AND sellers.id <= $end GROUP BY id
+sql_query = SELECT (sellers.id * 4 + 1) AS id, '' AS address_name, sellers.capitalization AS capitalization, 'Seller' AS class, 1 AS class_id, '' AS company, sellers.company_name AS company_name, CRC32(sellers.company_name) AS company_name_facet, '' AS company_two, '' AS content, UNIX_TIMESTAMP(sellers.created_at) AS created_at, 0 AS deleted, '' AS email, '' AS login, sellers.mission_statement AS mission_statement, '' AS name, '' AS state, sellers.user_id AS user_id FROM sellers WHERE sellers.id >= $start AND sellers.id <= $end GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -96,7 +96,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM addresses
-sql_query = SELECT (addresses.id * 4 + 2) AS id, '' AS address_name, 0 AS capitalization, 'Geo::Address' AS class, 2 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, CONCAT_WS(' ', addresses.line_1, addresses.line_2, addresses.city, addresses.province_region, addresses.zip_postal_code) AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '__empty_searchable__' AS empty_searchable, '' AS login, '' AS mission_statement, addresses.name AS name, states.name AS state, 0 AS user_id FROM addresses LEFT OUTER JOIN states ON states.id = addresses.state_id WHERE addresses.id >= $start AND addresses.id <= $end GROUP BY id
+sql_query = SELECT (addresses.id * 4 + 2) AS id, '' AS address_name, 0 AS capitalization, 'Geo::Address' AS class, 2 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, '' AS company_two, CONCAT_WS(' ', addresses.line_1, addresses.line_2, addresses.city, addresses.province_region, addresses.zip_postal_code) AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '' AS login, '' AS mission_statement, addresses.name AS name, states.name AS state, 0 AS user_id FROM addresses LEFT OUTER JOIN states AS states ON states.id = addresses.state_id WHERE addresses.id >= $start AND addresses.id <= $end GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -127,7 +127,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM users
-sql_query = SELECT (users.id * 4 + 3) AS id, '' AS address_name, 0 AS capitalization, 'User' AS class, 3 AS class_id, sellers.company_name AS company, '' AS company_name, 0 AS company_name_facet, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, users.deleted AS deleted, users.email AS email, '__empty_searchable__' AS empty_searchable, users.login AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM users LEFT OUTER JOIN sellers ON users.id = sellers.user_id WHERE users.id >= $start AND users.id <= $end AND (deleted = 0) GROUP BY id
+sql_query = SELECT (users.id * 4 + 3) AS id, '' AS address_name, 0 AS capitalization, 'User' AS class, 3 AS class_id, sellers.company_name AS company, '' AS company_name, 0 AS company_name_facet, REPLACE(sellers_two.company_name, '6', ' replacement ') AS company_two, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, users.deleted AS deleted, users.email AS email, users.login AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM users LEFT OUTER JOIN sellers AS sellers ON users.id = sellers.user_id LEFT OUTER JOIN sellers AS sellers_two ON users.id = sellers_two.user_id WHERE users.id >= $start AND users.id <= $end AND (deleted = 0) GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -1,7 +1,7 @@
 
-# Auto-generated at Mon Oct 08 15:40:47 -0400 2007.
+# Auto-generated at Wed Nov 14 03:28:33 -0500 2007.
 # Hand modifications will be overwritten.
-# /Users/eweaver/Desktop/projects/chow/vendor/plugins/ultrasphinx/test/integration/app/config/ultrasphinx/default.base
+# /Users/eweaver/Desktop/projects/fauna/ultrasphinx/trunk/test/integration/app/config/ultrasphinx/default.base
 indexer {
   mem_limit = 256M
 }
@@ -34,7 +34,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM states
-sql_query = SELECT (states.id * 4 + 0) AS id, CAST(GROUP_CONCAT(addresses.name SEPARATOR ' ') AS CHAR) AS address_name, 0 AS capitalization, 'Geo::State' AS class, 0 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '__empty_searchable__' AS empty_searchable, '' AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM states LEFT OUTER JOIN addresses ON states.id = addresses.state_id WHERE states.id >= $start AND states.id <= $end GROUP BY id
+sql_query = SELECT (states.id * 4 + 0) AS id, CAST(GROUP_CONCAT(DISTINCT addresses.name SEPARATOR ' ') AS CHAR) AS address_name, 0 AS capitalization, 'Geo::State' AS class, 0 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, '' AS company_two, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '' AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM states LEFT OUTER JOIN addresses AS addresses ON states.id = addresses.state_id WHERE states.id >= $start AND states.id <= $end GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -65,7 +65,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM sellers
-sql_query = SELECT (sellers.id * 4 + 1) AS id, '' AS address_name, sellers.capitalization AS capitalization, 'Seller' AS class, 1 AS class_id, '' AS company, sellers.company_name AS company_name, CRC32(sellers.company_name) AS company_name_facet, '' AS content, UNIX_TIMESTAMP(sellers.created_at) AS created_at, 0 AS deleted, '' AS email, '__empty_searchable__' AS empty_searchable, '' AS login, sellers.mission_statement AS mission_statement, '' AS name, '' AS state, sellers.user_id AS user_id FROM sellers WHERE sellers.id >= $start AND sellers.id <= $end GROUP BY id
+sql_query = SELECT (sellers.id * 4 + 1) AS id, '' AS address_name, sellers.capitalization AS capitalization, 'Seller' AS class, 1 AS class_id, '' AS company, sellers.company_name AS company_name, CRC32(sellers.company_name) AS company_name_facet, '' AS company_two, '' AS content, UNIX_TIMESTAMP(sellers.created_at) AS created_at, 0 AS deleted, '' AS email, '' AS login, sellers.mission_statement AS mission_statement, '' AS name, '' AS state, sellers.user_id AS user_id FROM sellers WHERE sellers.id >= $start AND sellers.id <= $end GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -96,7 +96,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM addresses
-sql_query = SELECT (addresses.id * 4 + 2) AS id, '' AS address_name, 0 AS capitalization, 'Geo::Address' AS class, 2 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, CONCAT_WS(' ', addresses.line_1, addresses.line_2, addresses.city, addresses.province_region, addresses.zip_postal_code) AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '__empty_searchable__' AS empty_searchable, '' AS login, '' AS mission_statement, addresses.name AS name, states.name AS state, 0 AS user_id FROM addresses LEFT OUTER JOIN states ON states.id = addresses.state_id WHERE addresses.id >= $start AND addresses.id <= $end GROUP BY id
+sql_query = SELECT (addresses.id * 4 + 2) AS id, '' AS address_name, 0 AS capitalization, 'Geo::Address' AS class, 2 AS class_id, '' AS company, '' AS company_name, 0 AS company_name_facet, '' AS company_two, CONCAT_WS(' ', addresses.line_1, addresses.line_2, addresses.city, addresses.province_region, addresses.zip_postal_code) AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, 0 AS deleted, '' AS email, '' AS login, '' AS mission_statement, addresses.name AS name, states.name AS state, 0 AS user_id FROM addresses LEFT OUTER JOIN states AS states ON states.id = addresses.state_id WHERE addresses.id >= $start AND addresses.id <= $end GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -127,7 +127,7 @@ sql_host = localhost
 sql_pass =
 sql_user = root
 sql_query_range = SELECT MIN(id), MAX(id) FROM users
-sql_query = SELECT (users.id * 4 + 3) AS id, '' AS address_name, 0 AS capitalization, 'User' AS class, 3 AS class_id, sellers.company_name AS company, '' AS company_name, 0 AS company_name_facet, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, users.deleted AS deleted, users.email AS email, '__empty_searchable__' AS empty_searchable, users.login AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM users LEFT OUTER JOIN sellers ON users.id = sellers.user_id WHERE users.id >= $start AND users.id <= $end AND (deleted = 0) GROUP BY id
+sql_query = SELECT (users.id * 4 + 3) AS id, '' AS address_name, 0 AS capitalization, 'User' AS class, 3 AS class_id, sellers.company_name AS company, '' AS company_name, 0 AS company_name_facet, REPLACE(sellers_two.company_name, '6', ' replacement ') AS company_two, '' AS content, UNIX_TIMESTAMP('1970-01-01 00:00:00') AS created_at, users.deleted AS deleted, users.email AS email, users.login AS login, '' AS mission_statement, '' AS name, '' AS state, 0 AS user_id FROM users LEFT OUTER JOIN sellers AS sellers ON users.id = sellers.user_id LEFT OUTER JOIN sellers AS sellers_two ON users.id = sellers_two.user_id WHERE users.id >= $start AND users.id <= $end AND (deleted = 0) GROUP BY id
 
 sql_group_column = capitalization
 sql_group_column = class_id
@@ -5,7 +5,7 @@
   company_name: <%= "seller#{num}" %>
   capitalization: <%= num * 1.548 %>
   mission_statement: <%= %w(Add value through developing superior products.).sort_by(&:rand).join(" ") %>
-  created_at: <%= (Time.now - num.weeks).to_s :db %>
-  updated_at: <%= (Time.now - num.days).to_s :db %>
+  updated_at: <%= ((time = Time.parse("Tue Oct 23 04:28:11")) - num.days).to_s(:db) %>
+  created_at: <%= (time - num.weeks).to_s(:db) %>
 <% end %>