datastax_rails 1.2.3 → 2.0.3

Files changed (104)
  1. checksums.yaml +4 -4
  2. data/MIT-LICENSE +1 -1
  3. data/README.rdoc +20 -8
  4. data/config/schema.xml.erb +22 -19
  5. data/config/solrconfig.xml.erb +1 -1
  6. data/lib/cql-rb_extensions.rb +27 -0
  7. data/lib/datastax_rails.rb +13 -17
  8. data/lib/datastax_rails/associations/association.rb +1 -4
  9. data/lib/datastax_rails/associations/collection_proxy.rb +0 -13
  10. data/lib/datastax_rails/attribute_assignment.rb +28 -91
  11. data/lib/datastax_rails/attribute_methods.rb +109 -44
  12. data/lib/datastax_rails/attribute_methods/before_type_cast.rb +71 -0
  13. data/lib/datastax_rails/attribute_methods/dirty.rb +52 -11
  14. data/lib/datastax_rails/attribute_methods/primary_key.rb +87 -0
  15. data/lib/datastax_rails/attribute_methods/read.rb +120 -0
  16. data/lib/datastax_rails/attribute_methods/typecasting.rb +52 -21
  17. data/lib/datastax_rails/attribute_methods/write.rb +59 -0
  18. data/lib/datastax_rails/base.rb +227 -236
  19. data/lib/datastax_rails/cassandra_only_model.rb +25 -19
  20. data/lib/datastax_rails/column.rb +384 -0
  21. data/lib/datastax_rails/connection.rb +12 -13
  22. data/lib/datastax_rails/cql/alter_column_family.rb +0 -1
  23. data/lib/datastax_rails/cql/base.rb +15 -3
  24. data/lib/datastax_rails/cql/column_family.rb +2 -2
  25. data/lib/datastax_rails/cql/create_column_family.rb +7 -18
  26. data/lib/datastax_rails/cql/delete.rb +4 -9
  27. data/lib/datastax_rails/cql/insert.rb +2 -8
  28. data/lib/datastax_rails/cql/select.rb +4 -4
  29. data/lib/datastax_rails/cql/update.rb +8 -17
  30. data/lib/datastax_rails/dynamic_model.rb +98 -0
  31. data/lib/datastax_rails/payload_model.rb +19 -31
  32. data/lib/datastax_rails/persistence.rb +39 -54
  33. data/lib/datastax_rails/railtie.rb +1 -0
  34. data/lib/datastax_rails/reflection.rb +1 -1
  35. data/lib/datastax_rails/relation.rb +20 -20
  36. data/lib/datastax_rails/relation/batches.rb +18 -16
  37. data/lib/datastax_rails/relation/facet_methods.rb +1 -1
  38. data/lib/datastax_rails/relation/finder_methods.rb +6 -10
  39. data/lib/datastax_rails/relation/search_methods.rb +62 -48
  40. data/lib/datastax_rails/rsolr_client_wrapper.rb +1 -1
  41. data/lib/datastax_rails/schema/cassandra.rb +34 -62
  42. data/lib/datastax_rails/schema/migrator.rb +9 -24
  43. data/lib/datastax_rails/schema/solr.rb +13 -30
  44. data/lib/datastax_rails/schema_cache.rb +67 -0
  45. data/lib/datastax_rails/timestamps.rb +84 -11
  46. data/lib/datastax_rails/types/dirty_collection.rb +88 -0
  47. data/lib/datastax_rails/types/dynamic_list.rb +14 -0
  48. data/lib/datastax_rails/types/dynamic_map.rb +32 -0
  49. data/lib/datastax_rails/types/dynamic_set.rb +10 -0
  50. data/lib/datastax_rails/util/solr_repair.rb +4 -5
  51. data/lib/datastax_rails/validations.rb +6 -12
  52. data/lib/datastax_rails/validations/uniqueness.rb +0 -4
  53. data/lib/datastax_rails/version.rb +1 -1
  54. data/lib/datastax_rails/wide_storage_model.rb +13 -29
  55. data/lib/schema_migration.rb +4 -0
  56. data/spec/datastax_rails/associations_spec.rb +0 -1
  57. data/spec/datastax_rails/attribute_methods_spec.rb +9 -6
  58. data/spec/datastax_rails/base_spec.rb +26 -0
  59. data/spec/datastax_rails/column_spec.rb +238 -0
  60. data/spec/datastax_rails/cql/select_spec.rb +1 -1
  61. data/spec/datastax_rails/cql/update_spec.rb +2 -2
  62. data/spec/datastax_rails/persistence_spec.rb +29 -15
  63. data/spec/datastax_rails/relation/batches_spec.rb +5 -5
  64. data/spec/datastax_rails/relation/finder_methods_spec.rb +0 -20
  65. data/spec/datastax_rails/relation/search_methods_spec.rb +8 -0
  66. data/spec/datastax_rails/relation_spec.rb +7 -0
  67. data/spec/datastax_rails/schema/migrator_spec.rb +5 -10
  68. data/spec/datastax_rails/schema/solr_spec.rb +1 -1
  69. data/spec/datastax_rails/types/dynamic_list_spec.rb +20 -0
  70. data/spec/datastax_rails/types/dynamic_map_spec.rb +22 -0
  71. data/spec/datastax_rails/types/dynamic_set_spec.rb +16 -0
  72. data/spec/dummy/config/application.rb +2 -1
  73. data/spec/dummy/config/datastax.yml +6 -3
  74. data/spec/dummy/config/environments/development.rb +4 -5
  75. data/spec/dummy/config/environments/test.rb +0 -5
  76. data/spec/dummy/log/development.log +18 -0
  77. data/spec/dummy/log/test.log +36 -0
  78. data/spec/feature/dynamic_fields_spec.rb +9 -0
  79. data/spec/feature/overloaded_tables_spec.rb +24 -0
  80. data/spec/spec_helper.rb +1 -1
  81. data/spec/support/default_consistency_shared_examples.rb +2 -2
  82. data/spec/support/models.rb +28 -14
  83. metadata +212 -188
  84. data/lib/datastax_rails/identity.rb +0 -64
  85. data/lib/datastax_rails/identity/abstract_key_factory.rb +0 -29
  86. data/lib/datastax_rails/identity/custom_key_factory.rb +0 -37
  87. data/lib/datastax_rails/identity/hashed_natural_key_factory.rb +0 -10
  88. data/lib/datastax_rails/identity/natural_key_factory.rb +0 -39
  89. data/lib/datastax_rails/identity/uuid_key_factory.rb +0 -27
  90. data/lib/datastax_rails/type.rb +0 -16
  91. data/lib/datastax_rails/types.rb +0 -9
  92. data/lib/datastax_rails/types/array_type.rb +0 -86
  93. data/lib/datastax_rails/types/base_type.rb +0 -42
  94. data/lib/datastax_rails/types/binary_type.rb +0 -19
  95. data/lib/datastax_rails/types/boolean_type.rb +0 -22
  96. data/lib/datastax_rails/types/date_type.rb +0 -23
  97. data/lib/datastax_rails/types/float_type.rb +0 -18
  98. data/lib/datastax_rails/types/integer_type.rb +0 -18
  99. data/lib/datastax_rails/types/string_type.rb +0 -16
  100. data/lib/datastax_rails/types/text_type.rb +0 -15
  101. data/lib/datastax_rails/types/time_type.rb +0 -23
  102. data/spec/datastax_rails/types/float_type_spec.rb +0 -31
  103. data/spec/datastax_rails/types/integer_type_spec.rb +0 -31
  104. data/spec/datastax_rails/types/time_type_spec.rb +0 -28
@@ -20,7 +20,8 @@ module DatastaxRails
       if keys.last.is_a?(Hash)
         options = keys.pop
       end
-      ActiveSupport::Notifications.instrument("remove.datastax_rails", :column_family => column_family, :key => key) do
+      keys = keys.flatten.collect {|k| self.attribute_definitions[self.primary_key].type_cast(k)}
+      ActiveSupport::Notifications.instrument("remove.datastax_rails", :column_family => column_family, :key => keys) do
         c = cql.delete(keys)
         if(options[:consistency])
           level = options[:consistency].to_s.upcase
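
The reworked remove type casts every key against the primary key column before issuing the delete, and still accepts a trailing options hash. A usage sketch (Person is an illustrative model, not part of the gem):

    # Keys are flattened and type cast via attribute_definitions[primary_key];
    # a trailing hash is popped off as options.
    Person.remove('e19d63c9-1111-4c9b-8a2d-19a49e62a1fe', :consistency => :quorum)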
@@ -50,24 +51,22 @@ module DatastaxRails
 
       # Write a record to cassandra. Can be either an insert or an update (they are exactly the same to cassandra)
       #
-      # @param [String] key the primary key for the record
-      # @param [Hash] attributes a hash containing the columns to set on the record
+      # @param [DatastaxRails::Base] record the record that we are writing
       # @param [Hash] options a hash containing various options
       # @option options [Symbol] :consistency the consistency to set for the Cassandra operation (e.g., ALL)
-      def write(key, attributes, options = {})
-        attributes = encode_attributes(attributes)
+      def write(record, options = {})
        level = (options[:consistency] || self.default_consistency).to_s.upcase
        if(valid_consistency?(level))
          options[:consistency] = level
        else
          raise ArgumentError, "'#{level}' is not a valid Cassandra consistency level"
        end
-        key.tap do |key|
-          ActiveSupport::Notifications.instrument("insert.datastax_rails", :column_family => column_family, :key => key, :attributes => attributes) do
+        record.id.tap do |key|
+          ActiveSupport::Notifications.instrument("insert.datastax_rails", :column_family => column_family, :key => key.to_s, :attributes => record.attributes) do
            if(self.storage_method == :solr)
-              write_with_solr(key, attributes, options)
+              write_with_solr(record, options)
            else
-              write_with_cql(key, attributes, options)
+              write_with_cql(record, options)
            end
          end
        end
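
Since write now receives the model instance itself rather than a key and an attributes hash (internally, _write passes self along with its options), a direct call looks roughly like this. Person, its name column, and the consistency value are illustrative; :new_record is normally supplied by _write:

    person = Person.new
    person.name = 'Jason'
    # :consistency is upcased and checked with valid_consistency?;
    # :new_record decides between cql.insert and cql.update further down.
    Person.write(person, :consistency => :quorum, :new_record => person.new_record?)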
@@ -81,49 +80,40 @@ module DatastaxRails
       # to build this object. Used so that we can avoid lazy-loading attributes that don't exist.
       # @return [DatastaxRails::Base] a model with the given attributes
       def instantiate(key, attributes, selected_attributes = [])
-        allocate.tap do |object|
-          object.instance_variable_set("@loaded_attributes", {}.with_indifferent_access)
-          object.instance_variable_set("@key", parse_key(key)) if key
-          object.instance_variable_set("@new_record", false)
-          object.instance_variable_set("@destroyed", false)
-          object.instance_variable_set("@attributes", typecast_attributes(object, attributes, selected_attributes).with_indifferent_access)
-        end
+        allocate.init_with('attributes' => attributes)
       end
-
+
       # Encodes the attributes in preparation for storing in cassandra. Calls the coders on the various type classes
       # to do the heavy lifting.
       #
-      # @param [Hash] attributes a hash containing the attributes to be encoded for storage
+      # @param [DatastaxRails::Base] record the record whose attributes we're encoding
+      # @param [Boolean] cql True if we're formatting for CQL, otherwise False
       # @return [Hash] a new hash with attributes encoded for storage
-      def encode_attributes(attributes)
+      def encode_attributes(record, cql)
        encoded = {}
-        attributes.each do |column_name, value|
-          encoded[column_name.to_s] = attribute_definitions[column_name.to_sym].coder.encode(value)
+        Types::DirtyCollection.ignore_modifications do
+          record.changed.each do |column_name|
+            value = record.read_attribute(column_name)
+            encoded[column_name.to_s] = cql ? attribute_definitions[column_name].type_cast_for_cql3(value) :
+                                              attribute_definitions[column_name].type_cast_for_solr(value)
+          end
        end
        encoded
      end
-
-      def typecast_attributes(object, attributes, selected_attributes = [])
-        attributes = attributes.symbolize_keys
-        casted = {}
-
-        selected_attributes.each do |att|
-          object.loaded_attributes[att] = true
-        end
-
-        attribute_definitions.each do |k,definition|
-          casted[k.to_s] = definition.instantiate(object, attributes[k.to_sym])#.to_s
-        end
-        casted
-      end
 
      private
-        def write_with_cql(key, attributes, options)
-          cql.update(key.to_s).columns(attributes).using(options[:consistency]).execute
+        def write_with_cql(record, options)
+          encoded = encode_attributes(record, true)
+          if options[:new_record]
+            cql.insert.columns(encoded).using(options[:consistency]).execute
+          else
+            cql.update(record.id).columns(encoded).using(options[:consistency]).execute
+          end
        end
 
-        def write_with_solr(key, attributes, options)
-          xml_doc = RSolr::Xml::Generator.new.add(attributes.merge(:id => key))
+        def write_with_solr(record, options)
+          encoded = encode_attributes(record, false)
+          xml_doc = RSolr::Xml::Generator.new.add(encoded.merge(self.primary_key => record.id.to_s))
          self.solr_connection.update(:data => xml_doc, :params => {:replacefields => false, :cl => options[:consistency]})
        end
      end
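
encode_attributes now works from the record's dirty tracking: only changed attributes are serialized, and the boolean flag selects CQL or Solr formatting. A minimal sketch (Person and its name column are assumptions for illustration):

    person = Person.new
    person.name = 'Jason'
    Person.encode_attributes(person, true)   # CQL path:  changed values via type_cast_for_cql3
    Person.encode_attributes(person, false)  # Solr path: changed values via type_cast_for_solr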
@@ -153,7 +143,7 @@ module DatastaxRails
    end
 
    def destroy(options = {})
-      self.class.remove(key, options)
+      self.class.remove(id, options)
      @destroyed = true
      freeze
    end
@@ -173,20 +163,15 @@ module DatastaxRails
 
    # Updates the attributes of the model from the passed-in hash and saves the
    # record If the object is invalid, the saving will fail and false will be returned.
-    #
-    # When updating model attributes, mass-assignment security protection is respected.
-    # If no +:as+ option is supplied then the +:default+ role will be used.
-    # If you want to bypass the protection given by +attr_protected+ and
-    # +attr_accessible+ then you can do so using the +:without_protection+ option.
    def update_attributes(attributes, options = {})
-      self.assign_attributes(attributes, options)
+      self.assign_attributes(attributes)
      save
    end
 
    # Updates its receiver just like +update_attributes+ but calls <tt>save!</tt> instead
    # of +save+, so an exception is raised if the record is invalid.
    def update_attributes!(attributes, options = {})
-      self.assign_attributes(attributes, options)
+      self.assign_attributes(attributes)
      save!
    end
 
@@ -223,26 +208,26 @@ module DatastaxRails
 
    private
      def _create_or_update(options)
-        result = new_record? ? _create(options) : _update(options)
+        result = new_record? ? _create_record(options) : _update_record(options)
        result != false
      end
 
-      def _create(options)
-        @key ||= self.class.next_key(self)
+      def _create_record(options)
+        # TODO: handle the non-UUID case
+        self.id ||= ::Cql::TimeUuid::Generator.new.next
        _write(options)
        @new_record = false
-        @key
+        self.id
      end
 
-      def _update(options)
+      def _update_record(options)
        _write(options)
      end
 
      def _write(options) #:nodoc:
        options[:new_record] = new_record?
-        changed_attributes = changed.inject({}) { |h, n| h[n] = read_attribute(n); h }
        return true if changed_attributes.empty?
-        self.class.write(key, changed_attributes, options)
+        self.class.write(self, options)
      end
    end
  end
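
New records now get their primary key from cql-rb's time-based UUID generator when none is supplied. A small sketch of that call (the printed value is illustrative; real output depends on the clock and node):

    require 'cql'  # cql-rb
    id = ::Cql::TimeUuid::Generator.new.next
    id        # => a Cql::TimeUuid (version 1, time-based)
    id.to_s   # => e.g. "00b69180-d0e1-11e2-8b8b-0800200c9a66"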
@@ -2,6 +2,7 @@ require 'rubygems'
 require 'datastax_rails'
 require 'rails'
 require 'action_controller/railtie'
+require 'yaml'
 
 module DatastaxRails
  class Railtie < Rails::Railtie
@@ -314,7 +314,7 @@ module DatastaxRails
      # Gets the source of the through reflection. It checks both a singularized
      # and pluralized form for <tt>:belongs_to</tt> or <tt>:has_many</tt>.
      #
-      #   class Post < ActiveRecord::Base
+      #   class Post < DatastaxRails::Base
      #     has_many :taggings
      #     has_many :tags, :through => :taggings
      #   end
@@ -250,8 +250,6 @@ module DatastaxRails
        when true
          return :solr
        else
-          # If we've already decided to use cassandra, just go with it.
-          return :cassandra unless use_solr_value
          [order_values, where_not_values, fulltext_values, greater_than_values, less_than_values, field_facet_values,
           range_facet_values, group_value].each do |solr_only_stuff|
            return :solr unless solr_only_stuff.blank?
@@ -260,8 +258,7 @@ module DatastaxRails
        return :solr unless page_value == 1
        @where_values.each do |wv|
          wv.each do |k,v|
-            next if k.to_sym == :id
-            if(klass.attribute_definitions[k].indexed == :solr || !klass.attribute_definitions[k].indexed)
+            unless klass.column_for_attribute(k).options[:cql_index]
              return :solr
            end
          end
@@ -292,7 +289,7 @@ module DatastaxRails
        cql.conditions(wv)
      end
      cql.allow_filtering if @allow_filtering_value
-      CassandraCQL::Result.new(cql.execute).fetch['count']
+      cql.execute.first['count']
    end
 
    # Constructs a CQL query and runs it against Cassandra directly. For this to
@@ -300,15 +297,15 @@ module DatastaxRails
    # For ad-hoc queries, you will have to use Solr.
    def query_via_cql
      select_columns = select_values.empty? ? (@klass.attribute_definitions.keys - @klass.lazy_attributes) : select_values.flatten
-      cql = @cql.select((select_columns + @klass.key_factory.key_columns).uniq)
+      cql = @cql.select((select_columns + [@klass.primary_key]).uniq)
      cql.using(@consistency_value) if @consistency_value
      @where_values.each do |wv|
-        cql.conditions(Hash[wv.map {|k,v| [(k.to_sym == :id ? :key : k), v]}])
+        cql.conditions(Hash[wv.map {|k,v| [(k.to_s == 'id' ? @klass.primary_key : k), v]}])
      end
      @greater_than_values.each do |gtv|
        gtv.each do |k,v|
          # Special case if inequality is equal to the primary key (we're paginating)
-          if(k == :key)
+          if(k.to_s == @klass.primary_key)
            cql.paginate(v)
          end
        end
@@ -319,10 +316,10 @@ module DatastaxRails
      cql.allow_filtering if @allow_filtering_value
      results = []
      begin
-        CassandraCQL::Result.new(cql.execute).fetch do |row|
-          results << @klass.instantiate(row['key'], row.to_hash, select_columns)
+        cql.execute.each do |row|
+          results << @klass.instantiate(row[@klass.primary_key], row, select_columns)
        end
-      rescue CassandraCQL::Error::InvalidRequestException => e
+      rescue ::Cql::CqlError => e # TODO: Break out the various exception types
        # If we get an exception about an empty key, ignore it. We'll return an empty set.
        if e.message =~ /Key may not be empty/
          # No-Op
@@ -377,7 +374,7 @@ module DatastaxRails
 
    def full_solr_range(attr)
      if(self.klass.attribute_definitions[attr])
-        self.klass.attribute_definitions[attr].coder.full_solr_range
+        self.klass.attribute_definitions[attr].full_solr_range
      else
        '[\"\" TO *]'
      end
@@ -495,8 +492,9 @@ module DatastaxRails
      end
 
      select_columns = select_values.empty? ? (@klass.attribute_definitions.keys - @klass.lazy_attributes) : select_values.flatten
-      select_columns << "id"
-      params[:fl] = select_columns.collect(&:to_s).join(",")
+      select_columns << @klass.primary_key
+      select_columns.collect! {|c| @klass.column_for_attribute(c).try(:type) == :map ? "#{c.to_s}*" : c.to_s}
+      params[:fl] = select_columns.uniq.join(",")
      unless(@stats_values.empty?)
        params[:stats] = 'true'
        @stats_values.flatten.each do |sv|
@@ -556,12 +554,11 @@ module DatastaxRails
      results.current_page = @page_value || 1
      results.total_entries = response['numFound'].to_i
      response['docs'].each do |doc|
-        id = doc['id']
+        id = @klass.attribute_definitions[@klass.primary_key].type_cast(doc[@klass.primary_key])
        if(@consistency_value)
          obj = @klass.with_cassandra.consistency(@consistency_value).find_by_id(id)
          results << obj if obj
        else
-          #byebug
          results << @klass.instantiate(id, doc, select_columns)
        end
      end
@@ -607,20 +604,23 @@ module DatastaxRails
      rsolr.commit :commit_attributes => {}
    end
 
+    SOLR_DATE_REGEX = /(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z)/i
    # Everything that gets indexed into solr is downcased as part of the analysis phase.
    # Normally, this is done to the query as well, but if your query includes wildcards
    # then analysis isn't performed. This means that the query does not get downcased.
    # We therefore need to perform the downcasing ourselves. This does it while still
-    # leaving boolean operations (AND, OR, NOT) upcased.
+    # leaving boolean operations (AND, OR, NOT, TO) and dates upcased.
    def downcase_query(value)
      if(value.is_a?(String))
        value.split(/\bAND\b/).collect do |a|
          a.split(/\bOR\b/).collect do |o|
-            o.split(/\bNOT\b/).collect do |n|
-              n.downcase
+            o.split(/\bNOT\b/).collect do |n|
+              n.split(/\bTO\b/).collect do |t|
+                t.downcase
+              end.join("TO")
            end.join("NOT")
          end.join("OR")
-        end.join("AND")
+        end.join("AND").gsub(SOLR_DATE_REGEX) { $1.upcase }
      else
        value
      end
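
The effect of the extra TO handling and the SOLR_DATE_REGEX pass, shown on an illustrative wildcard query (downcase_query is called internally on fulltext queries; the direct call is only to show input and output):

    query = "Name:Smi* AND NOT created_at:[2014-01-01T00:00:00Z TO *]"
    downcase_query(query)
    # => "name:smi* AND NOT created_at:[2014-01-01T00:00:00Z TO *]"
    # Terms are downcased; AND/OR/NOT/TO stay upcased, and the date literal is
    # restored to upper case by the SOLR_DATE_REGEX gsub.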
@@ -6,7 +6,7 @@ module DatastaxRails
    #
    # Example:
    #
-    #   Person.where("age > 21").find_each do |person|
+    #   Person.where(in_college: true).find_each do |person|
    #     person.party_all_night!
    #   end
    #
@@ -15,6 +15,9 @@ module DatastaxRails
    # you just need to loop over less than 1000 records, it's probably
    # better just to use the regular find methods.
    #
+    # You can also pass the +:start+ option to specify an offset to
+    # control the starting point.
+    #
    # @param options [Hash] finder options
    # @yield [record] a single DatastaxRails record
    def find_each(options = {})
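
A usage sketch of find_each with the newly documented :start option (Person, the query, and the UUID value are illustrative):

    Person.where(in_college: true).find_each(:batch_size => 500) do |person|
      person.party_all_night!
    end

    # Resume from a known primary key:
    Person.find_each(:start => '10000000-0000-1000-8000-000000000000') do |person|
      person.party_all_night!
    end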
@@ -23,6 +26,7 @@ module DatastaxRails
      end
    end
 
+    # Same as {find_each} but yields the index as a second parameter.
    def find_each_with_index(options = {})
      idx = 0
      find_in_batches(options) do |records|
@@ -44,30 +48,29 @@ module DatastaxRails
    # worker 2 handle from 10,000 and beyond (by setting the <tt>:start</tt>
    # option on that worker).
    #
-    # It's not possible to set the order. That is automatically set according
-    # Cassandra's key placement strategy. Records are retrieved and returned
-    # using only Cassandra and no SOLR interaction. This also mean that this
-    # method only works with any type of primary key (unlike ActiveRecord).
-    # You can't set the limit, however. That's used to control the batch sizes.
+    # It's not possible to set the order. For Cassandra based batching, the
+    # order is set according to Cassandra's key placement strategy. For Solr
+    # based batching, the order is ascending order of the primary key.
+    # You can't set the limit either. That's used to control the batch sizes.
    #
    # Example:
    #
-    #   Person.where("age > 21").find_in_batches do |group|
+    #   Person.where(in_college: true).find_in_batches do |group|
    #     sleep(50) # Make sure it doesn't get too crowded in there!
    #     group.each { |person| person.party_all_night! }
    #   end
    #
    # @param options [Hash] finder options
-    # @yeild [records] a batch of DatastaxRails records
+    # @yield [records] a batch of DatastaxRails records
    def find_in_batches(options = {})
      relation = self
 
-      unless (@order_values.empty? || @order_values == [{:created_at => :asc}])
+      unless (@order_values.empty?)
        DatastaxRails::Base.logger.warn("Scoped order and limit are ignored, it's forced to be batch order and batch size")
      end
 
      if (finder_options = options.except(:start, :batch_size)).present?
-        raise "You can't specify an order, it's forced to be #{relation.use_solr_value ? "created_at" : "key"}" if options[:order].present?
+        raise "You can't specify an order, it's forced to be #{@klass.primary_key}" if options[:order].present?
        raise "You can't specify a limit, it's forced to be the batch_size" if options[:limit].present?
 
        relation = apply_finder_options(finder_options)
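
The worker split described in the comment above might look like this (the model, batch size, and boundary key are illustrative assumptions):

    # Worker 1: walk the table from the beginning.
    Person.find_in_batches(:batch_size => 1000) do |group|
      group.each { |person| person.party_all_night! }
    end

    # Worker 2: pick up from a known primary-key boundary via :start.
    boundary = '20000000-0000-1000-8000-000000000000'
    Person.find_in_batches(:start => boundary, :batch_size => 1000) do |group|
      group.each { |person| person.party_all_night! }
    end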
@@ -76,21 +79,20 @@ module DatastaxRails
      start = options.delete(:start)
      batch_size = options.delete(:batch_size) || 1000
 
-      batch_order = (relation.use_solr_value ? :created_at : :key)
      relation = relation.limit(batch_size)
-      relation = relation.order(batch_order) if relation.use_solr_value
-      records = start ? relation.where(batch_order).greater_than(start).to_a : relation.to_a
+      relation = relation.order(@klass.primary_key) if relation.use_solr_value
+      records = start ? relation.where(@klass.primary_key).greater_than(start).to_a : relation.to_a
      while records.size > 0
        records_size = records.size
-        offset = relation.use_solr_value ? records.last.created_at.to_time : records.last.id
+        offset = records.last.id
        yield records
 
        break if records_size < batch_size
        if offset
          if relation.use_solr_value
-            offset += 1
+            offset = ::Cql::Uuid.new(offset.value+1)
          end
-          records = relation.where(batch_order).greater_than(offset).to_a
+          records = relation.where(@klass.primary_key).greater_than(offset).to_a
        else
          raise "Batch order not included in the custom select clause"
        end
@@ -1,7 +1,7 @@
 module DatastaxRails
  module FacetMethods
    # Instructs SOLR to get facet counts on the passed in field. Results are available in the facets accessor.
-    # Facets include Field and Range (Date is not supported as it is depricated in Solr).
+    # Facets include Field and Range (Date is not supported as it is depricated in Solr - use a range instead).
    #
    #   results = Article.field_facet(:author)
    #   results.facets => {"author"=>["vonnegut", 2, "asimov", 3]}
@@ -140,15 +140,8 @@ module DatastaxRails
    def find_by_attributes(match, attributes, *args) #:nodoc:
 
      conditions = Hash[attributes.map {|a| [a, args[attributes.index(a)]]}]
-      if Rails.version =~ /^3.*/
-        self.where_values << escape_attributes(conditions)
-        result = self.send(match.finder)
-      elsif Rails.version =~ /^4.*/
-        result = self.send(match.finder, conditions)
-      end
+      result = self.send(match.finder, conditions)
 
-      #result = where(conditions).send(match.finder)
-
      if match.blank? && result.blank?
        raise RecordNotFound, "Couldn't find #{klass.name} with #{conditions.to_a.collect {|p| p.join('=')}.join(', ')}"
      else
@@ -191,11 +184,14 @@ module DatastaxRails
    end
 
    def find_one(id)
-      with_cassandra.where(:key => id).first || raise(RecordNotFound, "Couldn't find #{@klass.name} with ID=#{id}")
+      key = @klass.attribute_definitions[@klass.primary_key].type_cast(id) || raise(RecordNotFound, "Couldn't find #{@klass.name} with an invalid ID=#{id}")
+
+      with_cassandra.where(@klass.primary_key => key).first || raise(RecordNotFound, "Couldn't find #{@klass.name} with ID=#{id}")
    end
 
    def find_some(ids)
-      result = with_cassandra.where(:key => ids).all
+      keys = ids.collect {|id| @klass.attribute_definitions[@klass.primary_key].type_cast(id) || "Couldn't find #{@klass.name} with an invalid ID=#{id}"}
+      result = with_cassandra.where(@klass.primary_key => keys).all
 
      expected_size =
        if @limit_value && ids.size > @limit_value