datastax_rails 1.0.16.3 → 1.0.17.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -53,7 +53,11 @@ module DatastaxRails
 
      # Casts the attribute and stores it in the attribute hash.
      def write_attribute(name, value)
-       @attributes[name.to_s] = self.class.typecast_attribute(self, name, value)
+       if(attribute_definitions[name.to_sym].coder.is_a?(DatastaxRails::Types::BinaryType))
+         @attributes[name.to_s] = value
+       else
+         @attributes[name.to_s] = self.class.typecast_attribute(self, name, value)
+       end
      end
 
      # Returns the attribute out of the attribute hash. If the attribute is lazy loaded and hasn't
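
The change above makes binary columns skip typecasting on assignment, so raw bytes reach the attribute hash untouched while every other column type is still coerced by its coder. A minimal sketch of the resulting behaviour (the Attachment model is invented for illustration; the string/binary column declarations mirror the PayloadModel definition later in this diff):

    # Hypothetical model, shown only to illustrate the new write_attribute branch.
    class Attachment < DatastaxRails::Base
      string :name      # assigned values are typecast by the column's coder
      binary :payload   # assigned values are now stored verbatim, no typecast
    end

    a = Attachment.new
    a.name = 42                          # coerced to the string "42"
    a.payload = File.read('blob.bin')    # raw bytes kept as-is
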
@@ -478,6 +478,10 @@ module DatastaxRails #:nodoc:
       @column_family || name.pluralize
     end
 
+    def payload_model?
+      self.ancestors.include?(DatastaxRails::PayloadModel)
+    end
+
     def base_class
       klass = self
       while klass.superclass != Base
@@ -4,10 +4,19 @@ module DatastaxRails
    # @return [Fixnum] the total number of entries that match the search
    # @!attribute [r] last_column_name
    # @return [Fixnum] the last column that was returned in the search in case you limited the number of columns (not supported)
-   attr_accessor :last_column_name, :total_entries
+   # @!attribute [r] per_page
+   # @return [Fixnum] the per page value of the search that produced these results (used by will_paginate)
+   # @!attribute [r] current_page
+   # @return [Fixnum] the current page of the search that produced these results (used by will_paginate)
+   attr_accessor :last_column_name, :total_entries, :per_page, :current_page
 
    def inspect
      "<DatastaxRails::Collection##{object_id} contents: #{super} last_column_name: #{last_column_name.inspect}>"
    end
+
+   def total_pages
+     return 1 unless per_page
+     (total_entries / per_page.to_f).ceil
+   end
  end
 end
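
With per_page and current_page now carried on the collection, a search result can be handed straight to will_paginate-style helpers. A rough illustration of the page arithmetic (the numbers are made up; the parse_docs change further down in this diff is what actually populates these fields):

    results = DatastaxRails::Collection.new
    results.total_entries = 45
    results.per_page      = 20
    results.current_page  = 1
    results.total_pages    # => (45 / 20.0).ceil == 3
    # With per_page left nil (no pagination requested), total_pages returns 1.
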
@@ -0,0 +1,48 @@
+module DatastaxRails#:nodoc:
+  module Cql #:nodoc:
+    class AlterColumnFamily < Base #:nodoc:
+      def initialize(cf_name)
+        @cf_name = cf_name
+        @action = nil
+      end
+
+      def add(column)
+        set_column(column)
+        @action = 'ADD'
+        self
+      end
+
+      def drop(column)
+        set_column(column)
+        @action = 'DROP'
+        self
+      end
+
+      def alter(column)
+        set_column(column)
+        @action = 'ALTER'
+        self
+      end
+
+      def set_column(column)
+        if(@action)
+          raise ArgumentError, "Only one operation allowed per CQL call"
+        end
+        @column = column
+      end
+
+      def to_cql
+        stmt = "ALTER COLUMNFAMILY #{@cf_name} "
+        if(@action == 'ALTER')
+          stmt << "ALTER #{@column.keys.first} TYPE #{@column.values.first}"
+        elsif(@action == 'ADD')
+          stmt << "ADD #{@column.keys.first} #{@column.values.first}"
+        elsif(@action == 'DROP')
+          stmt << "DROP #{@column}"
+        end
+
+        stmt
+      end
+    end
+  end
+end
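
The new builder produces one ALTER statement per instance: add and alter expect a one-entry hash of column name to type, while drop takes the bare column name. A quick sketch of the CQL it emits (column family and column names are invented):

    DatastaxRails::Cql::AlterColumnFamily.new('people').add('middle_name' => 'text').to_cql
    # => "ALTER COLUMNFAMILY people ADD middle_name text"

    DatastaxRails::Cql::AlterColumnFamily.new('people').drop('middle_name').to_cql
    # => "ALTER COLUMNFAMILY people DROP middle_name"

    # Calling a second add/drop/alter on the same instance raises ArgumentError,
    # since set_column refuses to overwrite a pending action.
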
@@ -16,6 +16,7 @@ module DatastaxRails
      # already been set up (Rails does this for you).
      def execute
        cql = self.to_cql
+       # puts cql
        DatastaxRails::Base.connection.execute_cql_query(cql)
      end
    end
@@ -4,8 +4,9 @@ module DatastaxRails#:nodoc:
      def initialize(cf_name)
        @cf_name = cf_name
        @columns = {}
-       @storage_parameters = {}
+       @storage_parameters = []
        @key_type = 'uuid'
+       @key_columns = @key_name = "KEY"
      end
 
      def key_type(key_type)
@@ -13,8 +14,18 @@ module DatastaxRails#:nodoc:
        self
      end
 
+     def key_name(key_name)
+       @key_name = key_name
+       self
+     end
+
+     def key_columns(key_columns)
+       @key_columns = key_columns
+       self
+     end
+
      def with(with)
-       @storage_parameters.merge!(with)
+       @storage_parameters << with
        self
      end
 
@@ -36,24 +47,15 @@ module DatastaxRails#:nodoc:
        with("default_validation" => val)
      end
 
-     def column_type=(type)
-       # TODO: Ignored till CQL supports super-columns
-     end
-
      def to_cql
-       stmt = "CREATE COLUMNFAMILY #{@cf_name} (key #{@key_type} PRIMARY KEY"
+       stmt = "CREATE COLUMNFAMILY #{@cf_name} (\"#{@key_name}\" #{@key_type}, "
        @columns.each do |name,type|
-         stmt << ", #{name} #{type}"
+         stmt << "#{name} #{type}, "
        end
-       stmt << ")"
+       stmt << "PRIMARY KEY (\"#{@key_columns}\"))"
        unless @storage_parameters.empty?
          stmt << " WITH "
-         first_parm = @storage_parameter.shift
-         stmt << "#{first_parm.first.to_s} = '#{first_parm.last.to_s}'"
-
-         @storage_parameters.each do |key, value|
-           stmt << " AND #{key.to_s} = '#{value.to_s}'"
-         end
+         stmt << @storage_parameters.join(" AND ")
        end
 
        stmt
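
The rewritten CREATE statement quotes the key column and moves the key declaration into an explicit trailing PRIMARY KEY clause, which is what lets key_name/key_columns describe the composite key that PayloadModel relies on below. A sketch of the output with no extra columns configured (the column family and key names are illustrative, and the constant DatastaxRails::Cql::ColumnFamily is an assumption based on the autoload list added later in this diff):

    cf = DatastaxRails::Cql::ColumnFamily.new('attachment_payloads')
    cf.key_type('text').key_name('digest').key_columns('digest, chunk')
    cf.to_cql
    # => CREATE COLUMNFAMILY attachment_payloads ("digest" text, PRIMARY KEY ("digest, chunk"))
    # The previous version instead hard-coded: CREATE COLUMNFAMILY attachment_payloads (key uuid PRIMARY KEY)
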
@@ -6,6 +6,8 @@ module DatastaxRails
       @keys = keys
       @timestamp = nil
       @columns = []
+      @conditions = {}
+      @key_name = "KEY"
       super
     end
 
@@ -19,22 +21,37 @@ module DatastaxRails
        self
      end
 
+     def conditions(conditions)
+       @conditions.merge!(conditions)
+       self
+     end
+
      def timestamp(timestamp)
        @timestamp = timestamp
        self
      end
 
+     def key_name(key_name)
+       @key_name = key_name
+       self
+     end
+
      def to_cql
-       values = []
+       values = [@keys]
        stmt = "DELETE #{@columns.join(',')} FROM #{@klass.column_family} USING CONSISTENCY #{@consistency} "
 
        if(@timestamp)
          stmt << "AND TIMESTAMP #{@timestamp} "
        end
 
-       stmt << "WHERE KEY IN (?)"
+       stmt << "WHERE \"#{@key_name}\" IN (?)"
+
+       @conditions.each do |col,val|
+         stmt << " AND #{col} = ?"
+         values << val
+       end
 
-       CassandraCQL::Statement.sanitize(stmt, @keys)
+       CassandraCQL::Statement.sanitize(stmt, values)
      end
    end
  end
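
These additions let a DELETE target a specific key column and carry extra equality conditions, which PayloadModel.write uses further down to remove stale chunks. To illustrate, its per-chunk call (quoted from later in this diff, with key holding the digest and j the chunk index) now expands roughly as follows:

    # As used in PayloadModel.write below:
    c = cql.delete(key.to_s).key_name('digest').conditions(:chunk => j)
    # c.to_cql then yields, roughly:
    #   DELETE  FROM attachment_payloads USING CONSISTENCY ... WHERE "digest" IN (?) AND chunk = ?
    # with the placeholders bound to the digest string and j via CassandraCQL::Statement.sanitize.
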
@@ -6,6 +6,7 @@ module DatastaxRails#:nodoc:
       @select = select.join(",")
       @limit = nil
       @conditions = {}
+      @order = nil
       super
     end
 
@@ -24,6 +25,11 @@ module DatastaxRails#:nodoc:
        self
      end
 
+     def order(order)
+       @order = order
+       self
+     end
+
      def to_cql
        conditions = []
        values = []
@@ -31,9 +37,9 @@ module DatastaxRails#:nodoc:
        @conditions.each do |k,v|
          values << v
          if v.kind_of?(Array)
-           conditions << "#{k.to_s} IN (?)"
+           conditions << "\"#{k.to_s}\" IN (?)"
          else
-           conditions << "#{k.to_s} = ?"
+           conditions << "\"#{k.to_s}\" = ?"
          end
        end
 
@@ -42,7 +48,11 @@ module DatastaxRails#:nodoc:
        end
 
        if @limit
-         stmt << "LIMIT #{@limit}"
+         stmt << "LIMIT #{@limit} "
+       end
+
+       if @order
+         stmt << "ORDER BY #{@order}"
        end
 
        CassandraCQL::Statement.sanitize(stmt, values)
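
Together these SELECT changes add an ORDER BY clause and double-quote condition columns, which is what PayloadModel.find depends on to read chunks back in digest order. Its call, quoted from later in this diff, now expands to something like:

    c = cql.select.conditions(:digest => digest).order('chunk')
    # c.to_cql then yields, roughly:
    #   SELECT ... FROM attachment_payloads ... WHERE "digest" = ? ORDER BY chunk
    # (the leading SELECT/FROM portion is assembled outside the hunks shown here)
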
@@ -35,9 +35,9 @@ module DatastaxRails
 
      def to_cql
        column_names = @columns.keys
-       cql = ""
-       Tempfile.open('cql', Rails.root.join("tmp")) do |stmt|
-         stmt << "update #{@klass.column_family} using consistency #{@consistency} "
+
+
+       stmt = "update #{@klass.column_family} using consistency #{@consistency} "
 
        if(@ttl)
          stmt << "AND TTL #{@ttl} "
@@ -52,17 +52,14 @@ module DatastaxRails
 
        first_entry = column_names.first
 
-         stmt << CassandraCQL::Statement.sanitize("#{first_entry.to_s} = ?", [@columns[first_entry]])
+         stmt << CassandraCQL::Statement.sanitize("\"#{first_entry.to_s}\" = ?", [@columns[first_entry]])
          column_names[1..-1].each do |col|
-           stmt << CassandraCQL::Statement.sanitize(", #{col.to_s} = ?", [@columns[col]])
+           stmt << CassandraCQL::Statement.sanitize(", \"#{col.to_s}\" = ?", [@columns[col]])
          end
        end
 
-         stmt << CassandraCQL::Statement.sanitize(" WHERE KEY IN (?)", [@key])
-         stmt.rewind
-         cql = stmt.read
-       end
-       cql
+       stmt << CassandraCQL::Statement.sanitize(" WHERE \"KEY\" IN (?)", [@key])
+       stmt
      end
 
      # def execute
@@ -8,6 +8,7 @@ module DatastaxRails
      end
    end
 
+   autoload :AlterColumnFamily
    autoload :Base
    autoload :ColumnFamily
    autoload :Consistency
@@ -4,6 +4,7 @@ module DatastaxRails
    class NaturalKey
      attr_reader :value
 
+     delegate :size, :bytesize, :to => :value
      def initialize(value)
        @value = value
      end
@@ -0,0 +1,89 @@
+module DatastaxRails
+  # A special model that is designed to efficiently store binary files.
+  # The major limitation is that the only fields this can store are
+  # the SHA1 digest and the payload itself. If you need to store
+  # other metadata, you will need another model that points at this
+  # one.
+  #
+  #   class AttachmentPayload < DatastaxRails::Payload
+  #     self.column_family = 'attachment_payloads'
+  #
+  #     validate do
+  #       if self.payload.size > 50.megabytes
+  #         errors.add(:payload, "is larger than the limit of 50MB")
+  #       end
+  #     end
+  #   end
+  class PayloadModel < Base
+
+    def self.inherited(child)
+      super
+      child.key :natural, :attributes => :digest
+      child.string :digest
+      child.binary :payload
+      child.validates :digest, :presence => true
+    end
+
+    def self.scoped
+      super.with_cassandra
+    end
+
+    def self.find(digest, options = {})
+      raise ArgumentError, "'#{options[:consistency]}' is not a valid Cassandra consistency level" unless valid_consistency?(options[:consistency].to_s.upcase) if options[:consistency]
+      c = cql.select.conditions(:digest => digest).order('chunk')
+      c.using(options[:consistency]) if options[:consistency]
+      io = StringIO.new("","w+")
+      found = false
+      CassandraCQL::Result.new(c.execute).fetch do |row|
+        io << Base64.decode64(row.to_hash['payload'])
+        found = true
+      end
+      raise DatastaxRails::RecordNotFound unless found
+      io.rewind
+      self.instantiate(digest, {:digest => digest, :payload => io.read}, [:digest, :payload])
+    end
+
+    def self.write(key, attributes, options = {})
+      raise ArgumentError, "'#{options[:consistency]}' is not a valid Cassandra consistency level" unless valid_consistency?(options[:consistency].to_s.upcase) if options[:consistency]
+      c = self.cql.select("count(*)").conditions(:digest => key)
+      count = CassandraCQL::Result.new(c.execute).fetch.to_hash["count"]
+
+      i = 0
+      io = StringIO.new(attributes['payload'])
+      while chunk = io.read(1.megabyte)
+        c = cql.insert.columns(:digest => key, :chunk => i, :payload => Base64.encode64(chunk))
+        c.using(options[:consistency]) if options[:consistency]
+        c.execute
+        i += 1
+      end
+
+      if count and count > i
+        i.upto(count) do |j|
+          c = cql.delete(key.to_s).key_name('digest').conditions(:chunk => j)
+          c.using(options[:consistency]) if options[:consistency]
+          c.execute
+        end
+      end
+
+      key
+    end
+
+    # Instantiates a new object without calling +initialize+.
+    #
+    # @param [String] key the primary key for the record
+    # @param [Hash] attributes a hash containing the columns to set on the record
+    # @param [Array] selected_attributes an array containing the attributes that were originally selected from cassandra
+    #   to build this object. Used so that we can avoid lazy-loading attributes that don't exist.
+    # @return [DatastaxRails::Base] a model with the given attributes
+    def self.instantiate(key, attributes, selected_attributes = [])
+      allocate.tap do |object|
+        object.instance_variable_set("@loaded_attributes", {}.with_indifferent_access)
+        object.instance_variable_set("@key", parse_key(key)) if key
+        object.instance_variable_set("@new_record", false)
+        object.instance_variable_set("@destroyed", false)
+        object.instance_variable_set("@attributes", attributes.with_indifferent_access)
+        attributes.keys.each {|k| object.instance_variable_get("@loaded_attributes")[k] = true}
+      end
+    end
+  end
+end
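
In short, PayloadModel bypasses the normal attribute plumbing and issues CQL directly: write splits the payload into 1 MB Base64-encoded chunks keyed by digest and chunk index, and find reassembles them in chunk order. A hedged usage sketch, following the class comment above (the AttachmentPayload subclass and the file are illustrative only):

    require 'digest'

    class AttachmentPayload < DatastaxRails::PayloadModel
      self.column_family = 'attachment_payloads'
    end

    data   = File.read('report.pdf')
    digest = Digest::SHA1.hexdigest(data)

    AttachmentPayload.write(digest, 'digest' => digest, 'payload' => data)   # stored as 1MB chunks
    AttachmentPayload.find(digest).payload                                   # chunks reassembled in order
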
@@ -54,10 +54,9 @@ module DatastaxRails
      # @param [Hash] attributes a hash containing the columns to set on the record
      # @param [Hash] options a hash containing various options
      # @option options [Symbol] :consistency the consistency to set for the Cassandra operation (e.g., ALL)
-     # @option options [String] :schema_version the version of the schema to set for this record
      def write(key, attributes, options = {})
        key.tap do |key|
-         attributes = encode_attributes(attributes, options[:schema_version])
+         attributes = encode_attributes(attributes)
          ActiveSupport::Notifications.instrument("insert.datastax_rails", :column_family => column_family, :key => key, :attributes => attributes) do
            c = cql.update(key.to_s).columns(attributes)
            if(options[:consistency])
@@ -73,10 +72,6 @@ module DatastaxRails
        end
      end
 
-     def store_file(key, file, options = {})
-
-     end
-
      # Instantiates a new object without calling +initialize+.
      #
      # @param [String] key the primary key for the record
@@ -87,7 +82,6 @@ module DatastaxRails
      def instantiate(key, attributes, selected_attributes = [])
        allocate.tap do |object|
          object.instance_variable_set("@loaded_attributes", {}.with_indifferent_access)
-         object.instance_variable_set("@schema_version", attributes.delete('schema_version'))
          object.instance_variable_set("@key", parse_key(key)) if key
          object.instance_variable_set("@new_record", false)
          object.instance_variable_set("@destroyed", false)
@@ -99,23 +93,11 @@ module DatastaxRails
      # to do the heavy lifting.
      #
      # @param [Hash] attributes a hash containing the attributes to be encoded for storage
-     # @param [String] schema_version the schema version to set in Cassandra. Not currently used.
      # @return [Hash] a new hash with attributes encoded for storage
-     def encode_attributes(attributes, schema_version)
-       encoded = {"schema_version" => schema_version.to_s}
+     def encode_attributes(attributes)
+       encoded = {}
        attributes.each do |column_name, value|
-         # if value.nil?
-         #   encoded[column_name.to_s] = ""
-         # else
-           encoded_value = attribute_definitions[column_name.to_sym].coder.encode(value)
-           if(encoded_value.is_a?(Array))
-             encoded_value.each_with_index do |chunk,i|
-               encoded[column_name.to_s + "_chunk_#{'%05d' % i}"] = chunk
-             end
-           else
-             encoded[column_name.to_s] = encoded_value
-           end
-         # end
+         encoded[column_name.to_s] = attribute_definitions[column_name.to_sym].coder.encode(value)
        end
        encoded
      end
@@ -129,13 +111,7 @@ module DatastaxRails
        end
 
        attribute_definitions.each do |k,definition|
-         if(definition.coder.is_a?(DatastaxRails::Types::BinaryType))
-           # Need to handle possibly chunked data
-           chunks = attributes.select {|key,value| key.to_s =~ /#{k.to_s}_chunk_\d+/ }.sort {|a,b| a.first.to_s <=> b.first.to_s}.collect {|c| c.last}
-           casted[k.to_s] = definition.instantiate(object, chunks)
-         else
-           casted[k.to_s] = definition.instantiate(object, attributes[k])
-         end
+         casted[k.to_s] = definition.instantiate(object, attributes[k])
        end
        casted
      end
@@ -210,7 +186,8 @@ module DatastaxRails
 
      def write(options) #:nodoc:
        changed_attributes = changed.inject({}) { |h, n| h[n] = read_attribute(n); h }
-       self.class.write(key, changed_attributes, options.merge(:schema_version => schema_version))
+       return true if changed_attributes.empty?
+       self.class.write(key, changed_attributes, options)
      end
    end
  end
@@ -137,11 +137,11 @@ module DatastaxRails
    end
 
    def find_one(id)
-     with_cassandra.where(:key => id).first || raise(RecordNotFound, "Couldn't find #{@klass.name} with #{primary_key}=#{id}")
+     with_cassandra.where(:KEY => id).first || raise(RecordNotFound, "Couldn't find #{@klass.name} with ID=#{id}")
    end
 
    def find_some(ids)
-     result = with_cassandra.where(:key => ids).all
+     result = with_cassandra.where(:KEY => ids).all
 
      expected_size =
        if @limit_value && ids.size > @limit_value
@@ -371,6 +371,8 @@ module DatastaxRails
    # @return [DatastaxRails::Collection] the resulting collection
    def parse_docs(response, select_columns)
      results = DatastaxRails::Collection.new
+     results.per_page = @per_page_value
+     results.current_page = @page_value || 1
      results.total_entries = response['numFound'].to_i
      response['docs'].each do |doc|
        id = doc['id']