pager-ultrasphinx 1.0.20080510

@@ -0,0 +1,367 @@
+
+ module Ultrasphinx
+   class Configure
+     class << self
+
+       include Associations
+
+       # Force all the indexed models to load and register in the MODEL_CONFIGURATION hash.
+       def load_constants
+
+         Dependencies.load_paths.grep(/\/models$/).each do |models_dir|
+           Dir.chdir(models_dir) do
+             Dir["**/*.rb"].each do |filename|
+               open(filename) do |file|
+                 begin
+                   if file.grep(/^\s+is_indexed/).any?
+                     filename = filename[0..-4]
+                     begin
+                       File.basename(filename).camelize.constantize
+                     rescue NameError => e
+                       filename.camelize.constantize
+                     end
+                   end
+                 rescue Object => e
+                   say "warning: critical autoload error on #{filename}; try referencing \"#{filename.camelize}\" directly in the console"
+                   #say e.backtrace.join("\n") if RAILS_ENV == "development"
+                 end
+               end
+             end
+           end
+         end
+
+         # Build the field-to-type mappings.
+         Fields.instance.configure(MODEL_CONFIGURATION)
+       end
+
+
+       # Main SQL builder.
+       def run
+
+         load_constants
+
+         say "rebuilding configurations for #{RAILS_ENV} environment"
+         say "available models are #{MODEL_CONFIGURATION.keys.to_sentence}"
+         File.open(CONF_PATH, "w") do |conf|
+           conf.puts global_header
+           say "generating SQL"
+
+           INDEXES.each do |index|
+             sources = []
+             cached_groups = Fields.instance.groups.join("\n")
+
+             MODEL_CONFIGURATION.each_with_index do |model_and_options, class_id|
+               # This relies on hash sort order being deterministic per-machine
+               model, options = model_and_options
+               klass = model.constantize
+               source = "#{model.tableize.gsub('/', '__')}_#{index}"
+
+               if index != DELTA_INDEX or options['delta']
+                 # If we are building the delta, we only want to include the models that requested it
+                 conf.puts build_source(index, Fields.instance, model, options, class_id, klass, source, cached_groups)
+                 sources << source
+               end
+             end
+
+             if sources.any?
+               # Don't generate a delta index if there are no delta tables
+               conf.puts build_index(index, sources)
+             end
+
+           end
+         end
+       end
+
+
+       ######
+
+       private
+
+       def global_header
+         ["\n# Auto-generated at #{Time.now}.",
+           "# Hand modifications will be overwritten.",
+           "# #{BASE_PATH}\n",
+           INDEXER_SETTINGS.except('delta')._to_conf_string('indexer'),
+           "",
+           DAEMON_SETTINGS._to_conf_string("searchd")]
+       end
+
+
+       def setup_source_database(klass)
+         # Supporting Postgres now
+         connection_settings = klass.connection.instance_variable_get("@config")
+
+         adapter_defaults = DEFAULTS[ADAPTER]
+         raise ConfigurationError, "Unsupported database adapter" unless adapter_defaults
+
+         conf = [adapter_defaults]
+         connection_settings.reverse_merge(CONNECTION_DEFAULTS).each do |key, value|
+           conf << "#{CONFIG_MAP[key]} = #{value}" if CONFIG_MAP[key]
+         end
+         conf.sort.join("\n")
+       end
+
+
+       def build_delta_condition(index, klass, options)
+         if index == DELTA_INDEX and options['delta']
+           # Add delta condition if necessary
+           table, field = klass.table_name, options['delta']['field']
+           source_string = "#{table}.#{field}"
+           delta_column = klass.columns_hash[field]
+
+           if delta_column
+             raise ConfigurationError, "#{source_string} is not a :datetime" unless delta_column.type == :datetime
+             if (options['fields'] + options['concatenate'] + options['include']).detect { |entry| entry['sortable'] }
+               # Warning about the sortable problem
+               # XXX Kind of in an odd place, but I want it to happen at index time
+               Ultrasphinx.say "warning; text sortable columns on #{klass.name} will return wrong results with partial delta indexing"
+             end
+
+             delta = INDEXER_SETTINGS['delta']
+             if delta
+               string = "#{source_string} > #{SQL_FUNCTIONS[ADAPTER]['delta']._interpolate(delta)}"
+             else
+               raise ConfigurationError, "No 'indexer { delta }' setting specified in '#{BASE_PATH}'"
+             end
+           else
+             Ultrasphinx.say "warning; #{klass.name} will reindex the entire table during delta indexing"
+           end
+         end
+       end
+
+
+       def setup_source_arrays(index, klass, fields, class_id, conditions, order)
+         condition_strings = Array(conditions).map do |condition|
+           "(#{condition})"
+         end
+
+         column_strings = [
+           "(#{klass.table_name}.#{klass.primary_key} * #{MODEL_CONFIGURATION.size} + #{class_id}) AS id",
+           "#{class_id} AS class_id", "'#{klass.name}' AS class"]
+         remaining_columns = fields.types.keys - ["class", "class_id"]
+         [column_strings, [], condition_strings, [], false, remaining_columns, order]
+       end
+
+
+       def range_select_string(klass, delta_condition)
+         ["sql_query_range = SELECT",
+           SQL_FUNCTIONS[ADAPTER]['range_cast']._interpolate("MIN(#{klass.primary_key})"),
+           ",",
+           SQL_FUNCTIONS[ADAPTER]['range_cast']._interpolate("MAX(#{klass.primary_key})"),
+           "FROM #{klass.table_name}",
+           ("WHERE #{delta_condition}" if delta_condition),
+         ].join(" ")
+       end
+
+
+       def query_info_string(klass, class_id)
+         "sql_query_info = SELECT * FROM #{klass.table_name} WHERE #{klass.table_name}.#{klass.primary_key} = (($id - #{class_id}) / #{MODEL_CONFIGURATION.size})"
+       end
+
+
+       def build_source(index, fields, model, options, class_id, klass, source, groups)
+
+         column_strings, join_strings, condition_strings, group_bys, use_distinct, remaining_columns, order =
+           setup_source_arrays(
+             index, klass, fields, class_id, options['conditions'], options['order'])
+
+         delta_condition =
+           build_delta_condition(
+             index, klass, options)
+         condition_strings << delta_condition if delta_condition
+
+         column_strings, join_strings, group_bys, remaining_columns =
+           build_regular_fields(
+             klass, fields, options['fields'], column_strings, join_strings, group_bys, remaining_columns)
+
+         column_strings, join_strings, group_bys, remaining_columns =
+           build_includes(
+             klass, fields, options['include'], column_strings, join_strings, group_bys, remaining_columns)
+
+         column_strings, join_strings, group_bys, use_distinct, remaining_columns =
+           build_concatenations(
+             klass, fields, options['concatenate'], column_strings, join_strings, group_bys, use_distinct, remaining_columns)
+
+         column_strings = add_missing_columns(fields, remaining_columns, column_strings)
+
+         ["\n# Source configuration\n\n",
+           "source #{source}\n{",
+           SOURCE_SETTINGS._to_conf_string,
+           setup_source_database(klass),
+           range_select_string(klass, delta_condition),
+           build_query(klass, column_strings, join_strings, condition_strings, use_distinct, group_bys, order),
+           "\n" + groups,
+           query_info_string(klass, class_id),
+           "}\n\n"]
+       end
+
+
+       def build_query(klass, column_strings, join_strings, condition_strings, use_distinct, group_bys, order)
+
+         primary_key = "#{klass.table_name}.#{klass.primary_key}"
+         group_bys = case ADAPTER
+           when 'mysql'
+             primary_key
+           when 'postgresql'
+             # Postgres is very fussy about GROUP_BY
+             ([primary_key] + group_bys.reject {|s| s == primary_key}.uniq.sort).join(', ')
+         end
+
+         ["sql_query =",
+           "SELECT",
+           # Avoid DISTINCT; it destroys performance
+           column_strings.sort_by do |string|
+             # Sphinx wants them always in the same order, but "id" must be first
+             (field = string[/.*AS (.*)/, 1]) == "id" ? "*" : field
+           end.join(", "),
+           "FROM #{klass.table_name}",
+           join_strings.uniq,
+           "WHERE #{primary_key} >= $start AND #{primary_key} <= $end",
+           condition_strings.uniq.map {|condition| "AND #{condition}" },
+           "GROUP BY #{group_bys}",
+           ("ORDER BY #{order}" if order)
+         ].flatten.compact.join(" ")
+       end
+
+
+       def add_missing_columns(fields, remaining_columns, column_strings)
+         remaining_columns.each do |field|
+           column_strings << fields.null(field)
+         end
+         column_strings
+       end
+
+
+       def build_regular_fields(klass, fields, entries, column_strings, join_strings, group_bys, remaining_columns)
+         entries.to_a.each do |entry|
+           source_string = "#{entry['table_alias']}.#{entry['field']}"
+           group_bys << source_string
+           column_strings, remaining_columns = install_field(fields, source_string, entry['as'], entry['function_sql'], entry['facet'], entry['sortable'], column_strings, remaining_columns)
+         end
+
+         [column_strings, join_strings, group_bys, remaining_columns]
+       end
+
+
+       def build_includes(klass, fields, entries, column_strings, join_strings, group_bys, remaining_columns)
+         entries.to_a.each do |entry|
+           raise ConfigurationError, "You must identify your association with either class_name or association_name, but not both" if entry['class_name'] && entry['association_name']
+
+           association = get_association(klass, entry)
+
+           # You can use 'class_name' and 'association_sql' to associate to a model that doesn't actually
+           # have an association.
+           join_klass = association ? association.class_name.constantize : entry['class_name'].constantize
+
+           raise ConfigurationError, "Unknown association from #{klass} to #{entry['class_name'] || entry['association_name']}" if not association and not entry['association_sql']
+
+           join_strings = install_join_unless_association_sql(entry['association_sql'], nil, join_strings) do
+             "LEFT OUTER JOIN #{join_klass.table_name} AS #{entry['table_alias']} ON " +
+             if (macro = association.macro) == :belongs_to
+               "#{entry['table_alias']}.#{join_klass.primary_key} = #{klass.table_name}.#{association.primary_key_name}"
+             elsif macro == :has_one
+               "#{klass.table_name}.#{klass.primary_key} = #{entry['table_alias']}.#{association.primary_key_name}"
+             else
+               raise ConfigurationError, "Unidentified association macro #{macro.inspect}. Please use the :association_sql key to manually specify the JOIN syntax."
+             end
+           end
+
+           source_string = "#{entry['table_alias']}.#{entry['field']}"
+           group_bys << source_string
+           column_strings, remaining_columns = install_field(fields, source_string, entry['as'], entry['function_sql'], entry['facet'], entry['sortable'], column_strings, remaining_columns)
+         end
+
+         [column_strings, join_strings, group_bys, remaining_columns]
+       end
+
+
+       def build_concatenations(klass, fields, entries, column_strings, join_strings, group_bys, use_distinct, remaining_columns)
+         entries.to_a.each do |entry|
+           if entry['field']
+             # Group concats
+
+             # Only has_many's or explicit sql right now.
+             association = get_association(klass, entry)
+
+             # You can use 'class_name' and 'association_sql' to associate to a model that doesn't actually
+             # have an association. The table alias chosen automatically might be kind of strange.
+             join_klass = association ? association.class_name.constantize : entry['class_name'].constantize
+
+             join_strings = install_join_unless_association_sql(entry['association_sql'], nil, join_strings) do
+               # XXX The foreign key is not verified for polymorphic relationships.
+               association = get_association(klass, entry)
+               "LEFT OUTER JOIN #{join_klass.table_name} AS #{entry['table_alias']} ON #{klass.table_name}.#{klass.primary_key} = #{entry['table_alias']}.#{association.primary_key_name}" +
+                 # XXX Is this valid?
+                 (entry['conditions'] ? " AND (#{entry['conditions']})" : "")
+             end
+
+             source_string = "#{entry['table_alias']}.#{entry['field']}"
+             order_string = ("ORDER BY #{entry['order']}" if entry['order'])
+             # We are using the field in an aggregate, so we don't want to add it to group_bys
+             source_string = SQL_FUNCTIONS[ADAPTER]['group_concat']._interpolate(source_string, order_string)
+             use_distinct = true
+
+             column_strings, remaining_columns = install_field(fields, source_string, entry['as'], entry['function_sql'], entry['facet'], entry['sortable'], column_strings, remaining_columns)
+
+           elsif entry['fields']
+             # Regular concats
+             source_string = "CONCAT_WS(' ', " + entry['fields'].map do |subfield|
+               "#{entry['table_alias']}.#{subfield}"
+             end.each do |subsource_string|
+               group_bys << subsource_string
+             end.join(', ') + ")"
+
+             column_strings, remaining_columns = install_field(fields, source_string, entry['as'], entry['function_sql'], entry['facet'], entry['sortable'], column_strings, remaining_columns)
+
+           else
+             raise ConfigurationError, "Invalid concatenate parameters for #{klass}: #{entry.inspect}."
+           end
+         end
+
+         [column_strings, join_strings, group_bys, use_distinct, remaining_columns]
+       end
+
+
+       def build_index(index, sources)
+         ["\n# Index configuration\n\n",
+           "index #{index}\n{",
+           sources.sort.map do |source|
+             " source = #{source}"
+           end.join("\n"),
+           INDEX_SETTINGS.merge('path' => INDEX_SETTINGS['path'] + "/sphinx_index_#{index}")._to_conf_string,
+           "}\n\n"]
+       end
+
+
+       def install_field(fields, source_string, as, function_sql, with_facet, with_sortable, column_strings, remaining_columns)
+         source_string = function_sql._interpolate(source_string) if function_sql
+
+         column_strings << fields.cast(source_string, as)
+         remaining_columns.delete(as)
+
+         # Generate duplicate text fields for sorting
+         if with_sortable
+           column_strings << fields.cast(source_string, "#{as}_sortable")
+           remaining_columns.delete("#{as}_sortable")
+         end
+
+         # Generate hashed integer fields for text grouping
+         if with_facet
+           column_strings << "#{SQL_FUNCTIONS[ADAPTER]['hash']._interpolate(source_string)} AS #{as}_facet"
+           remaining_columns.delete("#{as}_facet")
+         end
+         [column_strings, remaining_columns]
+       end
+
+
+       def install_join_unless_association_sql(association_sql, join_string, join_strings)
+         join_strings << (association_sql or join_string or yield)
+       end
+
+       def say(s)
+         Ultrasphinx.say s
+       end
+
+     end
+   end
+ end
@@ -0,0 +1,132 @@
+
+ class Array
+   # Only flatten the first level of an array
+   def _flatten_once
+     self.inject([]) do |set, element|
+       set + Array(element)
+     end
+   end
+
+   def _sum
+     self.inject(0) do |acc, element|
+       acc + element
+     end
+   end
+ end
+
+ class Object
+   def _metaclass
+     class << self
+       self
+     end
+   end
+
+   def _deep_dup
+     # Cause Ruby's clone/dup sucks.
+     Marshal.load(Marshal.dump(self))
+   end
+ end
+
+ #class HashWithIndifferentAccess
+ #  # Returns a regular Hash with all string keys. Much faster
+ #  # than HWIA#merge.
+ #  def _fast_merge(right)
+ #    left = Hash[self]
+ #    left.merge!(self.class.new(right))
+ #  end
+ #end
+
+ class Hash
+   def _coerce_basic_types
+     # XXX To remove
+     Hash[*self.map do |key, value|
+       [key.to_s,
+         if value.respond_to?(:to_i) && value.to_i.to_s == value
+           value.to_i
+         elsif value == ""
+           nil
+         else
+           value
+         end]
+     end._flatten_once]
+   end
+
+   def _stringify_all!(*skip)
+     # Stringifies all keys, and stringifies all values except those slotted for keys in 'skip'
+     stringify_keys!
+     self.except(*skip).each do |key, value|
+       self[key] = value.to_s
+     end
+     self
+   end
+
+   # Delete by multiple keys
+   def _delete(*args)
+     args.map do |key|
+       self.delete key
+     end
+   end
+
+   # Convert a hash to a Sphinx-style conf string
+   def _to_conf_string(section = nil)
+     inner = self.map do |key, value|
+       " #{key} = #{value}"
+     end.join("\n")
+     section ? "#{section} {\n#{inner}\n}\n" : inner
+   end
+
+   unless Hash.new.respond_to? :except
+     # Rails 1.2.6 compatibility
+     def except(*keys)
+       rejected = Set.new(respond_to?(:convert_key) ? keys.map { |key| convert_key(key) } : keys)
+       reject { |key,| rejected.include?(key) }
+     end
+     def except!(*keys)
+       replace(except(*keys))
+     end
+   end
+
+ end
+
+ ### Filter type coercion methods
+
+ class String
+   # XXX Not used enough to justify such a strange abstraction
+   def _to_numeric
+     zeroless = self.squeeze(" ").strip.sub(/^0+(\d)/, '\1')
+     zeroless.sub!(/(\...*?)0+$/, '\1')
+     if zeroless.to_i.to_s == zeroless
+       zeroless.to_i
+     elsif zeroless.to_f.to_s == zeroless
+       zeroless.to_f
+     elsif date = Chronic.parse(self.gsub(/(\d)([^\d\:\s])/, '\1 \2')) # Improve Chronic's flexibility a little
+       date.to_i
+     else
+       raise Ultrasphinx::UsageError, "#{self.inspect} could not be coerced into a numeric value"
+     end
+   end
+
+   # Interpolate SQL field names into functions
+   def _interpolate(*values)
+     if values.size == 1
+       self.gsub('?', values.first)
+     else
+       values.inject(self) { |result, value| result.sub('?', value.to_s) }
+     end
+   end
+
+ end
+
+ module Ultrasphinx::NumericSelf
+   def _to_numeric; self; end
+ end
+
+ module Ultrasphinx::DateSelf
+   def _to_numeric; self.to_i; end
+ end
+
+ class Fixnum; include Ultrasphinx::NumericSelf; end
+ class Bignum; include Ultrasphinx::NumericSelf; end
+ class Float; include Ultrasphinx::NumericSelf; end
+ class Date; include Ultrasphinx::DateSelf; end
+ class Time; include Ultrasphinx::DateSelf; end
@@ -0,0 +1,198 @@
+
+ module Ultrasphinx
+
+ =begin rdoc
+ This is a special singleton configuration class that stores the index field configurations. Rather than using a magic hash and including relevant behavior in Ultrasphinx::Configure and Ultrasphinx::Search, we unify it here.
+ =end
+
+   class Fields
+     # XXX Class needs a big refactoring; one of the worst parts of Ultrasphinx
+
+     include Singleton
+     include Associations
+
+     TYPE_MAP = {
+       'string' => 'text',
+       'text' => 'text',
+       'integer' => 'integer',
+       'date' => 'date',
+       'datetime' => 'date',
+       'timestamp' => 'date',
+       'float' => 'float',
+       'boolean' => 'bool'
+     }
+
+     attr_accessor :classes, :types
+
+     def initialize
+       @types = {}
+       @classes = Hash.new([])
+       @groups = []
+     end
+
+
+     def groups
+       @groups.compact.sort_by do |string|
+         string[/= (.*)/, 1]
+       end
+     end
+
+
+     def save_and_verify_type(field, new_type, string_sortable, klass, msg = nil)
+       # Smoosh fields together based on their name in the Sphinx query schema
+       field, new_type = field.to_s, TYPE_MAP[new_type.to_s]
+
+       if types[field]
+         # Existing field name; verify its type
+         msg ||= "Column type mismatch for #{field.inspect}; was already #{types[field].inspect}, but is now #{new_type.inspect}."
+         raise ConfigurationError, msg unless types[field] == new_type
+         classes[field] = (classes[field] + [klass]).uniq
+
+       else
+         # New field
+         types[field] = new_type
+         classes[field] = [klass]
+
+         @groups << case new_type
+           when 'integer'
+             "sql_attr_uint = #{field}"
+           when 'float'
+             "sql_attr_float = #{field}"
+           when 'bool'
+             "sql_attr_bool = #{field}"
+           when 'date'
+             "sql_attr_timestamp = #{field}"
+           when 'text'
+             "sql_attr_str2ordinal = #{field}" if string_sortable
+         end
+       end
+     end
+
+
+     def cast(source_string, field)
+       if types[field] == "date"
+         "UNIX_TIMESTAMP(#{source_string})"
+       elsif types[field] == "integer"
+         source_string # "CAST(#{source_string} AS UNSIGNED)"
+       else
+         source_string
+       end + " AS #{field}"
+     end
+
+
+     def null(field)
+       case types[field]
+         when 'text'
+           "''"
+         when 'integer', 'float', 'bool'
+           "0"
+         when 'date'
+           "18000" # Midnight on 1/1/1970
+         when nil
+           raise "Field #{field} is missing"
+         else
+           raise "Field #{field} does not have a valid type #{types[field]}."
+       end + " AS #{field}"
+     end
+
+
+     def configure(configuration)
+
+       configuration.each do |model, options|
+
+         klass = model.constantize
+         save_and_verify_type('class_id', 'integer', nil, klass)
+         save_and_verify_type('class', 'string', nil, klass)
+
+         begin
+
+           # Fields are from the model
+           options['fields'] = options['fields'].to_a.map do |entry|
+             extract_table_alias!(entry, klass)
+             extract_field_alias!(entry, klass)
+
+             unless klass.columns_hash[entry['field']]
+               # XXX I think this is here for migrations
+               Ultrasphinx.say "warning: field #{entry['field']} is not present in #{model}"
+             else
+               save_and_verify_type(entry['as'], klass.columns_hash[entry['field']].type, nil, klass)
+               install_duplicate_fields!(entry, klass)
+             end
+           end
+
+           # Joins are whatever they are in the target
+           options['include'].to_a.each do |entry|
+             extract_table_alias!(entry, klass)
+             extract_field_alias!(entry, klass)
+
+             association_model = get_association_model(klass, entry)
+
+             save_and_verify_type(entry['as'] || entry['field'], association_model.columns_hash[entry['field']].type, nil, klass)
+             install_duplicate_fields!(entry, klass)
+           end
+
+           # Regular concats are CHAR, group_concats are BLOB and need to be cast to CHAR
+           options['concatenate'].to_a.each do |entry|
+             extract_table_alias!(entry, klass)
+             save_and_verify_type(entry['as'], 'text', nil, klass)
+             install_duplicate_fields!(entry, klass)
+           end
+
+         rescue ActiveRecord::StatementInvalid
+           Ultrasphinx.say "warning: model #{model} does not exist in the database yet"
+         end
+       end
+
+       self
+     end
+
+
+     def install_duplicate_fields!(entry, klass)
+       if entry['facet']
+         # Source must be a string
+         save_and_verify_type(entry['as'], 'text', nil, klass,
+           "#{klass}##{entry['as']}: 'facet' option is only valid for text fields; numeric fields are enabled by default")
+         # Install facet column
+         save_and_verify_type("#{entry['as']}_facet", 'integer', nil, klass)
+       end
+
+       if entry['sortable']
+         # Source must be a string
+         save_and_verify_type(entry['as'], 'text', nil, klass,
+           "#{klass}##{entry['as']}: 'sortable' option is only valid for text columns; numeric fields are enabled by default")
+         # Install sortable column
+         save_and_verify_type("#{entry['as']}_sortable", 'text', true, klass)
+       end
+       entry
+     end
+
+
+     def extract_field_alias!(entry, klass)
+       unless entry['as']
+         entry['as'] = entry['field']
+       end
+     end
+
+
+     def extract_table_alias!(entry, klass)
+       unless entry['table_alias']
+         entry['table_alias'] = if entry['field'] and entry['field'].include? "." and entry['association_sql']
+           # This field is referenced by a table alias in association_sql
+           table_alias, entry['field'] = entry['field'].split(".")
+           table_alias
+         elsif get_association(klass, entry)
+           # Refers to the association
+           get_association(klass, entry).name
+         elsif entry['association_sql']
+           # Refers to the association_sql class's table
+           entry['class_name'].constantize.table_name
+         else
+           # Refers to this class
+           klass.table_name
+         end
+       end
+     end
+
+   end
+ end
+