datastax_rails 1.0.19.0 → 1.1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. checksums.yaml +7 -0
  2. data/README.rdoc +13 -2
  3. data/config/solrconfig.xml +3 -0
  4. data/lib/datastax_rails/associations/collection_association.rb +31 -0
  5. data/lib/datastax_rails/attribute_methods/definition.rb +2 -2
  6. data/lib/datastax_rails/base.rb +3 -7
  7. data/lib/datastax_rails/connection.rb +1 -0
  8. data/lib/datastax_rails/cql/alter_column_family.rb +9 -0
  9. data/lib/datastax_rails/cql/base.rb +2 -1
  10. data/lib/datastax_rails/cql/create_column_family.rb +3 -3
  11. data/lib/datastax_rails/cql/create_index.rb +25 -0
  12. data/lib/datastax_rails/cql/create_keyspace.rb +3 -3
  13. data/lib/datastax_rails/cql/delete.rb +3 -3
  14. data/lib/datastax_rails/cql/drop_index.rb +13 -0
  15. data/lib/datastax_rails/cql/insert.rb +2 -2
  16. data/lib/datastax_rails/cql/select.rb +2 -2
  17. data/lib/datastax_rails/cql/update.rb +20 -20
  18. data/lib/datastax_rails/cql.rb +2 -0
  19. data/lib/datastax_rails/persistence.rb +2 -10
  20. data/lib/datastax_rails/railtie.rb +7 -0
  21. data/lib/datastax_rails/relation/batches.rb +23 -10
  22. data/lib/datastax_rails/relation/facet_methods.rb +17 -0
  23. data/lib/datastax_rails/relation/finder_methods.rb +2 -2
  24. data/lib/datastax_rails/relation/search_methods.rb +1 -1
  25. data/lib/datastax_rails/relation.rb +14 -6
  26. data/lib/datastax_rails/tasks/column_family.rb +97 -18
  27. data/lib/datastax_rails/tasks/ds.rake +11 -0
  28. data/lib/datastax_rails/types/array_type.rb +1 -1
  29. data/lib/datastax_rails/types/boolean_type.rb +1 -1
  30. data/lib/datastax_rails/types/date_type.rb +1 -1
  31. data/lib/datastax_rails/types/float_type.rb +1 -1
  32. data/lib/datastax_rails/types/integer_type.rb +1 -1
  33. data/lib/datastax_rails/types/string_type.rb +2 -2
  34. data/lib/datastax_rails/types/text_type.rb +3 -4
  35. data/lib/datastax_rails/types/time_type.rb +1 -1
  36. data/lib/datastax_rails/validations/associated.rb +43 -0
  37. data/lib/datastax_rails/validations.rb +14 -2
  38. data/lib/datastax_rails/version.rb +1 -1
  39. data/lib/datastax_rails.rb +14 -14
  40. data/spec/datastax_rails/associations/has_many_association_spec.rb +1 -0
  41. data/spec/datastax_rails/base_spec.rb +6 -0
  42. data/spec/datastax_rails/cql/select_spec.rb +3 -3
  43. data/spec/datastax_rails/cql/update_spec.rb +2 -2
  44. data/spec/datastax_rails/persistence_spec.rb +16 -12
  45. data/spec/datastax_rails/relation/batches_spec.rb +20 -16
  46. data/spec/datastax_rails/relation/finder_methods_spec.rb +2 -2
  47. data/spec/dummy/log/test.log +3316 -0
  48. data/spec/spec.opts +0 -1
  49. data/spec/support/connection_double.rb +6 -0
  50. data/spec/support/default_consistency_shared_examples.rb +4 -2
  51. metadata +86 -107
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 315f2c0bed2d3ebd73b696212d09be65be6071f1
+   data.tar.gz: 3c7f2f05d532f73ec40128bf748f032cb7825ec4
+ SHA512:
+   metadata.gz: 956823938bdc95ac435bd57a7ec19cd6d1f565790847d10117fb86f30a2db78cbbe3ca0466db66632c9cd6a826bc1564146b687ac15027e15e8de54639698754
+   data.tar.gz: 0c082e39d39012a3c79dbed78180433dee4cd9bd0138aa3c9116aa03a267a5dff3f009973a9f86fb323e3a9a40254efd4d1afa8762f39ec72e56ca3909aed5aa
data/README.rdoc CHANGED
@@ -70,10 +70,21 @@ Once you've created some models, the following will upload the solr schemas and
  It is safe to run ds:schema over and over. In fact, it is necessary to re-run it any time you change the
  attributes on any model. DSR will only upload schema files if they have changed.
 
+ Create a sample Model. See Base documentation for more details:
+
+   class Person < DatastaxRails::Base
+     key :uuid
+     string :first_name
+     string :user_name
+     text :bio
+     date :birthdate
+     boolean :active
+     timestamps
+   end
+
  === Known issues
 
- Setting an integer field to something other than an integer results in nothing being set and no validation error
- (if you were using one).
+ Trying to set a value to nil via the solr API results in the field value not changing at all. Updating it via CQL doesn't have this issue.
 
  === More information
 
data/config/solrconfig.xml CHANGED
@@ -48,6 +48,9 @@
  affect both how text is indexed and queried.
  -->
  <luceneMatchVersion>LUCENE_40</luceneMatchVersion>
+
+ <!-- Enable DSE Search new type mappings -->
+ <dseTypeMappingVersion>0</dseTypeMappingVersion>
 
  <!-- lib directives can be used to instruct Solr to load an Jars
  identified and use them to resolve any "plugins" specified in
data/lib/datastax_rails/associations/collection_association.rb CHANGED
@@ -154,6 +154,37 @@ module DatastaxRails
      record
    end
 
+   # Replace this collection with +other_array+
+   # This will perform a diff and delete/add only records that have changed.
+   def replace(other_array)
+     other_array.each { |val| raise_on_type_mismatch(val) }
+     original_target = load_target.dup
+
+     delete(target - other_array)
+
+     unless concat(other_array - target)
+       @target = original_target
+       raise RecordNotSaved, "Failed to replace #{reflection.name} because one or more of the " \
+                             "new records could not be saved."
+     end
+   end
+
+   # Add +records+ to this association. Returns +self+ so method calls may be chained.
+   # Since << flattens its argument list and inserts each record, +push+ and +concat+ behave identically.
+   def concat(*records)
+     result = true
+     load_target if owner.new_record?
+
+     records.flatten.each do |record|
+       raise_on_type_mismatch(record)
+       add_to_target(record) do |r|
+         result &&= insert_record(record) unless owner.new_record?
+       end
+     end
+
+     result && records
+   end
+
    private
 
    # We have some records loaded from the database (persisted) and some that are
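The semantics mirror ActiveRecord's collection methods. A usage sketch, assuming a Person model with a has_many :jobs association (models and records here are hypothetical):

    person.jobs.concat(job_a, job_b)     # equivalent to person.jobs << job_a << job_b
    person.jobs.replace([job_b, job_c])  # deletes job_a, inserts job_c, leaves job_b untouched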
data/lib/datastax_rails/attribute_methods/definition.rb CHANGED
@@ -9,8 +9,8 @@ module DatastaxRails
    end
 
    def instantiate(record, value)
-     value ||= coder.default
-     return unless value
+     value = coder.default if value.nil?
+     return if value.nil?
 
      value = coder.decode(value)
      coder.wrap(record, name, value)
data/lib/datastax_rails/base.rb CHANGED
@@ -324,7 +324,7 @@ module DatastaxRails #:nodoc:
    self.default_consistency = :quorum
 
    class_attribute :storage_method
-   self.storage_method = :solr
+   self.storage_method = :cql
 
    class_attribute :models
    self.models = []
@@ -479,7 +479,7 @@ module DatastaxRails #:nodoc:
    delegate :count, :first, :first!, :last, :last!, :compute_stats, :to => :scoped
    delegate :sum, :average, :minimum, :maximum, :stddev, :to => :scoped
    delegate :cql, :with_cassandra, :with_solr, :commit_solr, :to => :scoped
-   delegate :find_each, :find_in_batches, :to => :scoped
+   delegate :find_each, :find_in_batches, :consistency, :to => :scoped
 
    # Sets the column family name
    #
@@ -493,7 +493,7 @@ module DatastaxRails #:nodoc:
    #
    # Returns [String] the name of the column family
    def column_family
-     @column_family || name.pluralize
+     @column_family || name.underscore.pluralize
    end
 
    def payload_model?
@@ -508,10 +508,6 @@ module DatastaxRails #:nodoc:
      klass
    end
 
-   # def find(*keys)
-   #   scoped.with_cassandra.find(keys)
-   # end
-
    def find_by_id(id)
      scoped.with_cassandra.find(id)
    rescue RecordNotFound
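Note that the default column family name is now derived from the underscored class name. A quick illustration with a hypothetical, minimal model:

    class AuditLog < DatastaxRails::Base
    end

    AuditLog.column_family  # => "audit_logs" (was "AuditLogs" under 1.0.x)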
data/lib/datastax_rails/connection.rb CHANGED
@@ -1,6 +1,7 @@
  # require 'datastax_rails/rsolr_client_wrapper'
  require 'rsolr/client_cert'
  require 'rest_client'
+ require "cassandra-cql/1.2"
  module DatastaxRails
    # The connection module holds all the code for establishing and maintaining a connection to
    # Datastax Exterprise. This includes both the Cassandra and Solr connections.
data/lib/datastax_rails/cql/alter_column_family.rb CHANGED
@@ -4,6 +4,7 @@ module DatastaxRails#:nodoc:
    def initialize(cf_name)
      @cf_name = cf_name
      @action = nil
+     @consistency = 'QUORUM'
    end
 
    def add(column)
@@ -23,6 +24,12 @@
      @action = 'ALTER'
      self
    end
+
+   def rename(col1,col2)
+     set_column([col1,col2])
+     @action = 'RENAME'
+     self
+   end
 
    def set_column(column)
      if(@action)
@@ -39,6 +46,8 @@
      stmt << "ADD #{@column.keys.first} #{@column.values.first}"
    elsif(@action == 'DROP')
      stmt << "DROP #{@column}"
+   elsif(@action == 'RENAME')
+     stmt << "RENAME \"#{@column[0]}\" TO \"#{@column[1]}\""
    end
 
    stmt
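A sketch of the new RENAME support (table and column names are made up); to_cql appends the clause to the ALTER statement it builds:

    DatastaxRails::Cql::AlterColumnFamily.new('people').rename('birthdate', 'date_of_birth').to_cql
    # ... RENAME "birthdate" TO "date_of_birth"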
data/lib/datastax_rails/cql/base.rb CHANGED
@@ -4,6 +4,7 @@ module DatastaxRails
    # Base initialize that sets the default consistency.
    def initialize(klass, *args)
      @consistency = klass.default_consistency.to_s.upcase
+     @keyspace = DatastaxRails::Base.config[:keyspace]
    end
 
    # Abstract. Should be overridden by subclasses
@@ -17,7 +18,7 @@ module DatastaxRails
    def execute
      cql = self.to_cql
      puts cql if ENV['DEBUG_CQL'] == 'true'
-     DatastaxRails::Base.connection.execute_cql_query(cql)
+     DatastaxRails::Base.connection.execute_cql_query(cql, :consistency => CassandraCQL::Thrift::ConsistencyLevel.const_get(@consistency || 'QUORUM'))
    end
  end
end
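Consistency now travels as a driver option rather than a USING CONSISTENCY clause in the statement itself. A sketch with a hypothetical model that overrides the default:

    class Comment < DatastaxRails::Base
      self.default_consistency = :local_quorum
    end
    # Cql::Base#initialize upcases this to 'LOCAL_QUORUM' and #execute passes
    # CassandraCQL::Thrift::ConsistencyLevel::LOCAL_QUORUM to execute_cql_query.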
data/lib/datastax_rails/cql/create_column_family.rb CHANGED
@@ -6,7 +6,7 @@ module DatastaxRails#:nodoc:
      @columns = {}
      @storage_parameters = []
      @key_type = 'uuid'
-     @key_columns = @key_name = "KEY"
+     @key_columns = @key_name = "key"
    end
 
    def key_type(key_type)
@@ -48,11 +48,11 @@
    end
 
    def to_cql
-     stmt = "CREATE COLUMNFAMILY #{@cf_name} (\"#{@key_name}\" #{@key_type}, "
+     stmt = "CREATE COLUMNFAMILY #{@cf_name} (#{@key_name} #{@key_type}, "
      @columns.each do |name,type|
        stmt << "#{name} #{type}, "
      end
-     stmt << "PRIMARY KEY (\"#{@key_columns}\"))"
+     stmt << "PRIMARY KEY (#{@key_columns}))"
      unless @storage_parameters.empty?
        stmt << " WITH "
        stmt << @storage_parameters.join(" AND ")
data/lib/datastax_rails/cql/create_index.rb ADDED
@@ -0,0 +1,25 @@
+ module DatastaxRails#:nodoc:
+   module Cql #:nodoc:
+     class CreateIndex < Base #:nodoc:
+       def initialize(index_name = nil)
+         @cf_name = nil
+         @column = nil
+         @index_name = index_name
+       end
+
+       def on(cf_name)
+         @cf_name = cf_name
+         self
+       end
+
+       def column(column)
+         @column = column
+         self
+       end
+
+       def to_cql
+         "CREATE INDEX #{@index_name} ON #{@cf_name} (#{@column})"
+       end
+     end
+   end
+ end
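The builder chains like the other Cql classes; for example (index, table, and column names are illustrative):

    DatastaxRails::Cql::CreateIndex.new('people_last_name_idx').on('people').column('last_name').to_cql
    # => "CREATE INDEX people_last_name_idx ON people (last_name)"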
data/lib/datastax_rails/cql/create_keyspace.rb CHANGED
@@ -17,12 +17,12 @@ module DatastaxRails#:nodoc:
    end
 
    def to_cql
-     stmt = "CREATE KEYSPACE #{@ks_name} WITH strategy_class = '#{@strategy_class}'"
+     stmt = "CREATE KEYSPACE #{@ks_name} WITH REPLICATION = {'class' : '#{@strategy_class}'"
 
      @strategy_options.each do |key, value|
-       stmt << " AND strategy_options:#{key.to_s} = '#{value.to_s}'"
+       stmt << ", '#{key.to_s}' : '#{value.to_s}'"
      end
-
+     stmt << '}'
      stmt
    end
  end
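For a keyspace created with SimpleStrategy and a replication factor of 1 (values illustrative), the generated statement changes roughly from the CQL 2 form

    CREATE KEYSPACE my_app WITH strategy_class = 'SimpleStrategy' AND strategy_options:replication_factor = '1'

to the CQL 3 form

    CREATE KEYSPACE my_app WITH REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor' : '1'}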
data/lib/datastax_rails/cql/delete.rb CHANGED
@@ -7,7 +7,7 @@ module DatastaxRails
      @timestamp = nil
      @columns = []
      @conditions = {}
-     @key_name = "KEY"
+     @key_name = "key"
      super
    end
 
@@ -37,8 +37,8 @@
    end
 
    def to_cql
-     values = [@keys]
-     stmt = "DELETE #{@columns.join(',')} FROM #{@klass.column_family} USING CONSISTENCY #{@consistency} "
+     values = [@keys.collect{|k|k.to_s}]
+     stmt = "DELETE #{@columns.join(',')} FROM #{@klass.column_family} "
 
      if(@timestamp)
        stmt << "AND TIMESTAMP #{@timestamp} "
data/lib/datastax_rails/cql/drop_index.rb ADDED
@@ -0,0 +1,13 @@
+ module DatastaxRails
+   module Cql
+     class DropIndex < Base
+       def initialize(index_name)
+         @index_name = index_name
+       end
+
+       def to_cql
+         "DROP INDEX #{@index_name}"
+       end
+     end
+   end
+ end
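The counterpart to CreateIndex above (same illustrative index name):

    DatastaxRails::Cql::DropIndex.new('people_last_name_idx').to_cql
    # => "DROP INDEX people_last_name_idx"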
data/lib/datastax_rails/cql/insert.rb CHANGED
@@ -36,7 +36,7 @@ module DatastaxRails
        keys << k.to_s
        values << v
      end
-     stmt = "INSERT INTO #{@klass.column_family} (#{keys.join(',')}) VALUES (#{('?'*keys.size).split(//).join(',')}) USING CONSISTENCY #{@consistency} "
+     stmt = "INSERT INTO #{@klass.column_family} (#{keys.join(',')}) VALUES (#{('?'*keys.size).split(//).join(',')}) "
 
      if(@ttl)
        stmt << "AND TTL #{@ttl} "
@@ -46,7 +46,7 @@ module DatastaxRails
        stmt << "AND TIMESTAMP #{@timestamp}"
      end
 
-     CassandraCQL::Statement.sanitize(stmt, values)
+     CassandraCQL::Statement.sanitize(stmt, values).force_encoding('UTF-8')
    end
  end
end
data/lib/datastax_rails/cql/select.rb CHANGED
@@ -39,10 +39,10 @@ module DatastaxRails#:nodoc:
    def to_cql
      conditions = []
      values = []
-     stmt = "SELECT #{@select} FROM #{@klass.column_family} USING CONSISTENCY #{@consistency} "
+     stmt = "SELECT #{@select} FROM #{@klass.column_family} "
 
      if @paginate
-       conditions << "token(\"KEY\") > token('#{@paginate}')"
+       conditions << "token(key) > token('#{@paginate}')"
      end
 
      @conditions.each do |k,v|
data/lib/datastax_rails/cql/update.rb CHANGED
@@ -37,29 +37,29 @@ module DatastaxRails
  column_names = @columns.keys
 
 
- stmt = "update #{@klass.column_family} using consistency #{@consistency} "
-
- if(@ttl)
- stmt << "AND TTL #{@ttl} "
- end
+ stmt = "update #{@klass.column_family} "
+
+ if(@ttl)
+ stmt << "AND TTL #{@ttl} "
+ end
+
+ if(@timestamp)
+ stmt << "AND TIMESTAMP #{@timestamp}"
+ end
+
+ unless @columns.empty?
+ stmt << "SET "
 
- if(@timestamp)
- stmt << "AND TIMESTAMP #{@timestamp}"
- end
+ first_entry = column_names.first
 
- unless @columns.empty?
- stmt << "SET "
-
- first_entry = column_names.first
-
- stmt << CassandraCQL::Statement.sanitize("\"#{first_entry.to_s}\" = ?", [@columns[first_entry]])
- column_names[1..-1].each do |col|
- stmt << CassandraCQL::Statement.sanitize(", \"#{col.to_s}\" = ?", [@columns[col]])
- end
+ stmt << CassandraCQL::Statement.sanitize("\"#{first_entry.to_s}\" = ?", [@columns[first_entry]])
+ column_names[1..-1].each do |col|
+ stmt << CassandraCQL::Statement.sanitize(", \"#{col.to_s}\" = ?", [@columns[col]])
  end
-
- stmt << CassandraCQL::Statement.sanitize(" WHERE \"KEY\" IN (?)", [@key])
- stmt
+ end
+
+ stmt << CassandraCQL::Statement.sanitize(" WHERE key IN (?)", [@key])
+ stmt.force_encoding('UTF-8')
  end
 
  # def execute
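For reference, the statement this builder now emits looks roughly like the following (table, column, and key value are illustrative); consistency is supplied by Cql::Base#execute rather than embedded in the CQL:

    update people SET "first_name" = 'Joe' WHERE key IN ('some-uuid')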
data/lib/datastax_rails/cql.rb CHANGED
@@ -13,9 +13,11 @@ module DatastaxRails
    autoload :ColumnFamily
    autoload :Consistency
    autoload :CreateColumnFamily
+   autoload :CreateIndex
    autoload :CreateKeyspace
    autoload :Delete
    autoload :DropColumnFamily
+   autoload :DropIndex
    autoload :DropKeyspace
    autoload :Insert
    autoload :Select
data/lib/datastax_rails/persistence.rb CHANGED
@@ -112,7 +112,7 @@ module DatastaxRails
      end
 
      attribute_definitions.each do |k,definition|
-       casted[k.to_s] = definition.instantiate(object, attributes[k])
+       casted[k.to_s] = definition.instantiate(object, attributes[k]).to_s
      end
      casted
    end
@@ -123,17 +123,8 @@ module DatastaxRails
    end
 
    def write_with_solr(key, attributes, options)
-     # We need to collect removed fields since we can't currently delete the column via
-     # the solr interface
-     removed_fields = []
-     attributes.each do |k,v|
-       removed_fields << k.to_s if v.blank?
-     end
      xml_doc = RSolr::Xml::Generator.new.add(attributes.merge(:id => key))
      self.solr_connection.update(:data => xml_doc, :params => {:replacefields => false, :cl => options[:consistency]})
-     unless removed_fields.empty?
-       cql.delete(key.to_s).columns(removed_fields).using(options[:consistency]).execute
-     end
    end
  end
 
@@ -205,6 +196,7 @@ module DatastaxRails
    end
 
    def write(options) #:nodoc:
+     options[:new_record] = new_record?
      changed_attributes = changed.inject({}) { |h, n| h[n] = read_attribute(n); h }
      return true if changed_attributes.empty?
      self.class.write(key, changed_attributes, options)
data/lib/datastax_rails/railtie.rb CHANGED
@@ -1,8 +1,15 @@
  require 'rubygems'
  require 'datastax_rails'
  require 'rails'
+ require 'action_controller/railtie'
+
  module DatastaxRails
    class Railtie < Rails::Railtie
+     config.action_dispatch.rescue_responses.merge!(
+       'DatastaxRails::RecordNotFound' => :not_found,
+       'DatastaxRails::RecordInvalid' => :unprocessable_entity,
+       'DatastaxRails::RecordNotSaved' => :unprocessable_entity)
+
      initializer 'datastax_rails.init' do
        ActiveSupport.on_load(:datastax_rails) do
        end
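With these mappings registered, an unrescued DSR exception bubbling out of a controller is rendered with the mapped HTTP status instead of a 500. A sketch (controller and model are hypothetical):

    class PeopleController < ApplicationController
      def show
        @person = Person.find(params[:id])  # DatastaxRails::RecordNotFound => 404 in production
      end
    end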
data/lib/datastax_rails/relation/batches.rb CHANGED
@@ -22,6 +22,16 @@ module DatastaxRails
        records.each { |record| yield record }
      end
    end
+
+   def find_each_with_index(options = {})
+     idx = 0
+     find_in_batches(options) do |records|
+       records.each do |record|
+         yield record, idx
+         idx += 1
+       end
+     end
+   end
 
    # Yields each batch of records that was found by the find +options+ as
    # an array. The size of each batch is set by the <tt>:batch_size</tt>
@@ -50,14 +60,14 @@ module DatastaxRails
    # @param options [Hash] finder options
    # @yeild [records] a batch of DatastaxRails records
    def find_in_batches(options = {})
-     relation = self.with_cassandra
+     relation = self
 
-     unless @order_values.empty? && @per_page_value.blank?
+     unless (@order_values.empty? || @order_values == [{:created_at => :asc}])
        DatastaxRails::Base.logger.warn("Scoped order and limit are ignored, it's forced to be batch order and batch size")
      end
 
      if (finder_options = options.except(:start, :batch_size)).present?
-       raise "You can't specify an order, it's forced to be #{batch_order}" if options[:order].present?
+       raise "You can't specify an order, it's forced to be #{relation.use_solr_value ? "created_at" : "key"}" if options[:order].present?
        raise "You can't specify a limit, it's forced to be the batch_size" if options[:limit].present?
 
        relation = apply_finder_options(finder_options)
@@ -66,20 +76,23 @@ module DatastaxRails
      start = options.delete(:start)
      batch_size = options.delete(:batch_size) || 1000
 
+     batch_order = relation.use_solr_value ? :created_at : :key
      relation = relation.limit(batch_size)
-     records = start ? relation.where(:KEY).greater_than(start).to_a : relation.to_a
-
+     relation = relation.order(batch_order) if relation.use_solr_value
+     records = start ? relation.where(batch_order).greater_than(start).to_a : relation.to_a
      while records.size > 0
        records_size = records.size
-       primary_key_offset = records.last.id
+       offset = relation.use_solr_value ? records.last.created_at.to_time : records.last.id
        yield records
 
        break if records_size < batch_size
-
-       if primary_key_offset
-         records = relation.where(:KEY).greater_than(primary_key_offset).to_a
+       if offset
+         if relation.use_solr_value
+           offset += 1
+         end
+         records = relation.where(batch_order).greater_than(offset).to_a
        else
-         raise "Primary key not included in the custom select clause"
+         raise "Batch order not included in the custom select clause"
        end
      end
    end
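A sketch of the new helper, reusing the Person model from the README:

    Person.find_each_with_index(:batch_size => 500) do |person, i|
      puts "#{i}: #{person.user_name}"
    end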
data/lib/datastax_rails/relation/facet_methods.rb ADDED
@@ -0,0 +1,17 @@
+ module DatastaxRails
+   module FacetMethods
+     # Instructs SOLR to get facet counts on the passed in field.
+     #
+     #   Model.facet(:category)
+     #
+     # This may be specified multiple times to get facet counts on multiple fields.
+     #
+     # @param field [String, Symbol] the field to get facet counts for
+     # @return [DatastaxRails::Relation] a new Relation object
+     def facet(field)
+       clone.tap do |r|
+         r.facet_field_values << field
+       end
+     end
+   end
+ end
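Facets chain like any other relation method; a sketch against the README's Person model (field choices are illustrative):

    Person.where(:active => true).facet(:user_name).facet(:birthdate)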
data/lib/datastax_rails/relation/finder_methods.rb CHANGED
@@ -137,11 +137,11 @@ module DatastaxRails
    end
 
    def find_one(id)
-     with_cassandra.where(:KEY => id).first || raise(RecordNotFound, "Couldn't find #{@klass.name} with ID=#{id}")
+     with_cassandra.where(:key => id).first || raise(RecordNotFound, "Couldn't find #{@klass.name} with ID=#{id}")
    end
 
    def find_some(ids)
-     result = with_cassandra.where(:KEY => ids).all
+     result = with_cassandra.where(:key => ids).all
 
      expected_size =
        if @limit_value && ids.size > @limit_value
data/lib/datastax_rails/relation/search_methods.rb CHANGED
@@ -163,7 +163,7 @@ module DatastaxRails
      return self if attribute.blank?
 
      clone.tap do |r|
-       order_by = attribute.is_a?(Hash) ? attribute.dup : {attribute => :asc}
+       order_by = attribute.is_a?(Hash) ? attribute.dup : {attribute.to_sym => :asc}
 
        r.order_values << order_by
      end
data/lib/datastax_rails/relation.rb CHANGED
@@ -240,7 +240,13 @@ module DatastaxRails
    # works if you run against a secondary index. So this currently just
    # delegates to the count_via_solr method.
    def count_via_cql
-     with_solr.count_via_solr
+     select_columns = ['count(*)']
+     cql = @cql.select(select_columns)
+     cql.using(@consistency_value) if @consistency_value
+     @where_values.each do |wv|
+       cql.conditions(wv)
+     end
+     CassandraCQL::Result.new(cql.execute).fetch['count']
    end
 
    # Constructs a CQL query and runs it against Cassandra directly. For this to
@@ -248,7 +254,7 @@ module DatastaxRails
    # For ad-hoc queries, you will have to use Solr.
    def query_via_cql
      select_columns = select_values.empty? ? (@klass.attribute_definitions.keys - @klass.lazy_attributes) : select_values.flatten
-     cql = @cql.select(select_columns)
+     cql = @cql.select(select_columns + ['key'])
      cql.using(@consistency_value) if @consistency_value
      @where_values.each do |wv|
        cql.conditions(wv)
@@ -256,7 +262,7 @@ module DatastaxRails
      @greater_than_values.each do |gtv|
        gtv.each do |k,v|
          # Special case if inequality is equal to the primary key (we're paginating)
-         if(k == :KEY)
+         if(k == :key)
            cql.paginate(v)
          end
        end
@@ -266,7 +272,7 @@ module DatastaxRails
      end
      results = []
      CassandraCQL::Result.new(cql.execute).fetch do |row|
-       results << @klass.instantiate(row.row.key, row.to_hash, select_columns)
+       results << @klass.instantiate(row['key'], row.to_hash, select_columns)
      end
      results
    end
@@ -362,7 +368,8 @@ module DatastaxRails
      end
 
      select_columns = select_values.empty? ? (@klass.attribute_definitions.keys - @klass.lazy_attributes) : select_values.flatten
-
+     select_columns << "id"
+     params[:fl] = select_columns.collect(&:to_s).join(",")
      unless(@stats_values.empty?)
        params[:stats] = 'true'
        @stats_values.flatten.each do |sv|
@@ -398,6 +405,7 @@ module DatastaxRails
      if solr_response["stats"]
        @stats = solr_response["stats"]["stats_fields"].with_indifferent_access
      end
+     pp params if ENV['DEBUG_SOLR'] == 'true'
      results
    end
 
@@ -487,7 +495,7 @@ module DatastaxRails
    def method_missing(method, *args, &block) #:nodoc:
      if Array.method_defined?(method)
        to_a.send(method, *args, &block)
-     elsif @klass.respond_to?(method)
+     elsif @klass.respond_to?(method, true)
        scoping { @klass.send(method, *args, &block) }
      else
        super