cassandra_store 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +18 -0
- data/.rubocop.yml +123 -0
- data/.travis.yml +14 -0
- data/Gemfile +7 -0
- data/LICENSE.txt +22 -0
- data/README.md +282 -0
- data/Rakefile +8 -0
- data/cassandra_store.gemspec +31 -0
- data/docker-compose.yml +9 -0
- data/lib/cassandra_store.rb +18 -0
- data/lib/cassandra_store/base.rb +426 -0
- data/lib/cassandra_store/migration.rb +41 -0
- data/lib/cassandra_store/railtie.rb +7 -0
- data/lib/cassandra_store/relation.rb +187 -0
- data/lib/cassandra_store/schema_migration.rb +11 -0
- data/lib/cassandra_store/tasks/cassandra.rake +45 -0
- data/lib/cassandra_store/version.rb +3 -0
- data/spec/cassandra_store/base_spec.rb +691 -0
- data/spec/cassandra_store/migration_spec.rb +79 -0
- data/spec/cassandra_store/relation_spec.rb +222 -0
- data/spec/cassandra_store/schema_migration_spec.rb +19 -0
- data/spec/fixtures/1589957812_migration1.rb +5 -0
- data/spec/fixtures/1589957813_migration2.rb +5 -0
- data/spec/spec_helper.rb +87 -0
- metadata +214 -0

data/lib/cassandra_store/migration.rb
@@ -0,0 +1,41 @@
+class CassandraStore::Migration
+  def self.migration_file(path, version)
+    Dir[File.join(path, "#{version}_*.rb")].first
+  end
+
+  def self.migration_class(path, version)
+    require migration_file(path, version)
+
+    File.basename(migration_file(path, version), ".rb").gsub(/\A[0-9]+_/, "").camelcase.constantize
+  end
+
+  def self.up(path, version)
+    migration_class(path, version).new.up
+
+    CassandraStore::SchemaMigration.create!(version: version.to_s)
+  end
+
+  def self.down(path, version)
+    migration_class(path, version).new.down
+
+    CassandraStore::SchemaMigration.where(version: version.to_s).delete_all
+  end
+
+  def self.migrate(path)
+    migrated = CassandraStore::SchemaMigration.all.to_a.map(&:version).to_set
+    all = Dir[File.join(path, "*.rb")].map { |file| File.basename(file) }
+    todo = all.select { |file| file =~ /\A[0-9]+_/ && !migrated.include?(file.to_i.to_s) }.sort_by(&:to_i)
+
+    todo.each do |file|
+      up path, file.to_i.to_s
+    end
+  end
+
+  def execute(*args)
+    CassandraStore::Base.execute(*args)
+  end
+
+  def up; end
+
+  def down; end
+end

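A migration is a Ruby file whose name starts with a numeric version, as in the spec fixtures listed above (1589957812_migration1.rb); self.migrate requires each pending file and derives the class name from the rest of the filename (1589957812_create_test_logs.rb would map to CreateTestLogs). A minimal sketch of such a file, with a hypothetical class name and CQL schema:

# cassandra/migrate/1589957812_create_test_logs.rb (hypothetical name and schema)
class CreateTestLogs < CassandraStore::Migration
  def up
    execute <<~CQL
      CREATE TABLE test_logs (
        date DATE,
        bucket INT,
        id TIMEUUID,
        username TEXT,
        PRIMARY KEY ((date, bucket), id)
      )
    CQL
  end

  def down
    execute "DROP TABLE test_logs"
  end
end
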
data/lib/cassandra_store/relation.rb
@@ -0,0 +1,187 @@
+class CassandraStore::Relation
+  attr_accessor :target, :where_values, :where_cql_values, :order_values, :limit_value, :distinct_value, :select_values
+
+  def initialize(target:)
+    self.target = target
+  end
+
+  def all
+    fresh
+  end
+
+  def where(hash = {})
+    fresh.tap do |relation|
+      relation.where_values = (relation.where_values || []) + [hash]
+    end
+  end
+
+  def where_cql(string, args = {})
+    fresh.tap do |relation|
+      str = string
+
+      args.each do |key, value|
+        str.gsub!(":#{key}", target.quote_value(value))
+      end
+
+      relation.where_cql_values = (relation.where_cql_values || []) + [str]
+    end
+  end
+
+  def update_all(string_or_hash)
+    if string_or_hash.is_a?(Hash)
+      target.execute("UPDATE #{target.quote_table_name target.table_name} SET #{string_or_hash.map { |column, value| "#{target.quote_column_name column} = #{target.quote_value value}" }.join(", ")} #{where_clause}")
+    else
+      target.execute("UPDATE #{target.quote_table_name target.table_name} SET #{string_or_hash} #{where_clause}")
+    end
+
+    true
+  end
+
+  def order(hash = {})
+    fresh.tap do |relation|
+      relation.order_values = (relation.order_values || {}).merge(hash)
+    end
+  end
+
+  def limit(n)
+    fresh.tap do |relation|
+      relation.limit_value = n
+    end
+  end
+
+  def first(n = 1)
+    result = limit(n).to_a
+
+    return result.first if n == 1
+
+    result
+  end
+
+  def distinct
+    fresh.tap do |relation|
+      relation.distinct_value = true
+    end
+  end
+
+  def select(*columns)
+    fresh.tap do |relation|
+      relation.select_values = (relation.select_values || []) + columns
+    end
+  end
+
+  def find_each(options = {})
+    return enum_for(:find_each, options) unless block_given?
+
+    find_in_batches options do |batch|
+      batch.each do |record|
+        yield record
+      end
+    end
+  end
+
+  def find_in_batches(batch_size: 1_000)
+    return enum_for(:find_in_batches, batch_size: batch_size) unless block_given?
+
+    each_page "SELECT #{select_clause} FROM #{target.quote_table_name target.table_name} #{where_clause} #{order_clause} #{limit_clause}", page_size: batch_size do |result|
+      records = []
+
+      result.each do |row|
+        records << if select_values.present?
+          row
+        else
+          load_record(row)
+        end
+      end
+
+      yield(records) unless records.empty?
+    end
+  end
+
+  def delete_all
+    target.execute("DELETE FROM #{target.quote_table_name target.table_name} #{where_clause}")
+
+    true
+  end
+
+  def delete_in_batches
+    find_in_batches do |records|
+      records.each do |record|
+        where_clause = target.key_columns.map { |column, _| "#{target.quote_column_name column} = #{target.quote_value record.read_raw_attribute(column)}" }.join(" AND ")
+
+        target.execute "DELETE FROM #{target.quote_table_name target.table_name} WHERE #{where_clause}"
+      end
+    end
+
+    true
+  end
+
+  def count
+    cql = "SELECT COUNT(*) FROM #{target.quote_table_name target.table_name} #{where_clause}"
+
+    target.execute(cql).first["count"]
+  end
+
+  def to_a
+    @records ||= find_each.to_a
+  end
+
+  private
+
+  def load_record(row)
+    target.new.tap do |record|
+      record.persisted!
+
+      row.each do |key, value|
+        record.write_raw_attribute(key, value)
+      end
+    end
+  end
+
+  def fresh
+    dup.tap do |relation|
+      relation.instance_variable_set(:@records, nil)
+    end
+  end
+
+  def each_page(cql, page_size:)
+    result = target.execute(cql, page_size: page_size)
+
+    while result
+      yield result
+
+      result = result.next_page
+    end
+  end
+
+  def select_clause
+    "#{distinct_value ? "DISTINCT" : ""} #{select_values.presence ? select_values.join(", ") : "*"}"
+  end
+
+  def where_clause
+    return if where_values.blank? && where_cql_values.blank?
+
+    constraints = []
+
+    Array(where_values).each do |hash|
+      hash.each do |column, value|
+        constraints << if value.is_a?(Array) || value.is_a?(Range)
+          "#{target.quote_column_name column} IN (#{value.to_a.map { |v| target.quote_value v }.join(", ")})"
+        else
+          "#{target.quote_column_name column} = #{target.quote_value value}"
+        end
+      end
+    end
+
+    constraints += Array(where_cql_values)
+
+    "WHERE #{constraints.join(" AND ")}"
+  end
+
+  def order_clause
+    (order_values.presence ? "ORDER BY #{order_values.map { |column, value| "#{target.quote_column_name column} #{value}" }.join(", ")}" : "").to_s
+  end
+
+  def limit_clause
+    (limit_value ? "LIMIT #{limit_value.to_i}" : "").to_s
+  end
+end

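The relation assembles CQL from chainable calls, each of which returns a fresh copy. A short usage sketch, assuming the TestLog model exercised in the specs further down; the method names are exactly those defined above, while the concrete values are illustrative:

# Nothing is executed until the records are enumerated.
last_id = Cassandra::TimeUuid::Generator.new.at(Time.parse("2016-11-01 12:00:00"))

logs = TestLog
  .where(date: Date.parse("2016-11-01"), bucket: [0, 1, 2])   # Array/Range become IN (...)
  .where_cql("id >= :id", id: last_id)                         # raw CQL with quoted placeholders
  .order(id: "ASC")
  .limit(100)

logs.count                                   # SELECT COUNT(*) ... WHERE ...
logs.to_a                                    # loads and memoizes the matching records
logs.find_each { |log| puts log.username }   # pages through results, 1_000 rows per page
logs.delete_in_batches                       # deletes the matching rows key by key
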
data/lib/cassandra_store/schema_migration.rb
@@ -0,0 +1,11 @@
+class CassandraStore::SchemaMigration < CassandraStore::Base
+  def self.table_name
+    "schema_migrations"
+  end
+
+  def self.create_table(if_not_exists: false)
+    execute "CREATE TABLE #{"IF NOT EXISTS" if if_not_exists} schema_migrations(version TEXT PRIMARY KEY)"
+  end
+
+  column :version, :text, partition_key: true
+end

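This is the bookkeeping table behind CassandraStore::Migration above: each applied version is stored as one row, and self.migrate skips every version it finds here. For example (the version strings match the fixture filenames in the listing; the output is illustrative):

CassandraStore::SchemaMigration.create_table(if_not_exists: true)
CassandraStore::SchemaMigration.all.to_a.map(&:version)
# => ["1589957812", "1589957813"] (illustrative)
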
data/lib/cassandra_store/tasks/cassandra.rake
@@ -0,0 +1,45 @@
+namespace :cassandra do
+  namespace :keyspace do
+    desc "Drop the keyspace"
+    task drop: :environment do
+      CassandraStore::Base.logger.level = Logger::DEBUG
+      CassandraStore::Base.drop_keyspace(if_exists: true)
+    end
+
+    desc "Create the keyspace"
+    task create: :environment do
+      CassandraStore::Base.logger.level = Logger::DEBUG
+      CassandraStore::Base.create_keyspace(if_not_exists: true)
+    end
+  end
+
+  namespace :migrate do
+    desc "Run a specific up-migration"
+    task up: :environment do
+      raise "No VERSION specified" unless ENV["VERSION"]
+
+      CassandraStore::Base.logger.level = Logger::DEBUG
+
+      CassandraStore::SchemaMigration.create_table(if_not_exists: true)
+      CassandraStore::Migration.up Rails.root.join("cassandra/migrate"), ENV["VERSION"]
+    end
+
+    desc "Run a specific down-migration"
+    task down: :environment do
+      raise "No VERSION specified" unless ENV["VERSION"]
+
+      CassandraStore::Base.logger.level = Logger::DEBUG
+
+      CassandraStore::SchemaMigration.create_table(if_not_exists: true)
+      CassandraStore::Migration.down Rails.root.join("cassandra/migrate"), ENV["VERSION"]
+    end
+  end
+
+  desc "Run pending migrations"
+  task migrate: :environment do
+    CassandraStore::Base.logger.level = Logger::DEBUG
+
+    CassandraStore::SchemaMigration.create_table(if_not_exists: true)
+    CassandraStore::Migration.migrate Rails.root.join("cassandra/migrate")
+  end
+end

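In a Rails app these tasks are the usual entry point (rake cassandra:keyspace:create, rake cassandra:migrate, or rake cassandra:migrate:up VERSION=1589957812). A hypothetical sketch of invoking the same tasks programmatically, assuming the gem's railtie has loaded this rake file:

# Hypothetical one-off script, e.g. run via `rails runner`; mirrors
# `rake cassandra:keyspace:create cassandra:migrate`.
require "rake"

Rails.application.load_tasks

Rake::Task["cassandra:keyspace:create"].invoke
Rake::Task["cassandra:migrate"].invoke
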
data/spec/cassandra_store/base_spec.rb
@@ -0,0 +1,691 @@
+require File.expand_path("../spec_helper", __dir__)
+
+class TestRecord < CassandraStore::Base
+  column :text, :text
+  column :boolean, :boolean
+  column :int, :int
+  column :bigint, :bigint
+  column :date, :date
+  column :timestamp, :timestamp
+  column :timeuuid, :timeuuid
+  column :uuid, :uuid
+end
+
+RSpec.describe CassandraStore::Base do
+  describe ".new" do
+    it "assigns the specified attributes" do
+      test_log = TestLog.new(timestamp: "2016-11-01 12:00:00", username: "username")
+
+      expect(Time.parse("2016-11-01 12:00:00").utc.round(3)).to eq(test_log.timestamp)
+      expect(test_log.username).to eq("username")
+    end
+  end
+
+  describe ".drop_keyspace" do
+    # Already tested
+  end
+
+  describe ".create_keyspace" do
+    # Already tested
+  end
+
+  describe ".quote_keyspace_name" do
+    it "delegates to quote_column_name" do
+      allow(described_class).to receive(:quote_column_name)
+
+      described_class.quote_keyspace_name("keyspace_name")
+
+      expect(described_class).to have_received(:quote_column_name).with("keyspace_name")
+    end
+  end
+
+  describe ".quote_table_name" do
+    it "delegates to quote_column_name" do
+      allow(described_class).to receive(:quote_column_name)
+
+      described_class.quote_table_name("table_name")
+
+      expect(described_class).to have_received(:quote_column_name).with("table_name")
+    end
+  end
+
+  describe ".quote_column_name" do
+    it "quotes the value" do
+      expect(described_class.quote_column_name("column_name")).to eq("\"column_name\"")
+    end
+
+    it "raises an ArgumentError if the value includes quotes" do
+      expect { described_class.quote_column_name("column\"name") }.to raise_error(ArgumentError)
+    end
+  end
+
+  describe ".quote_value" do
+    it "converts timestamps" do
+      expect(described_class.quote_value(Time.parse("2020-05-21 12:00:00 UTC"))).to eq("1590062400000")
+    end
+
+    it "quotes datetimes" do
+      expect(described_class.quote_value(DateTime.new(2020, 5, 21, 12, 0, 0))).to eq("1590062400000")
+    end
+
+    it "quotes dates" do
+      expect(described_class.quote_value(Date.new(2020, 5, 21))).to eq("'2020-05-21'")
+    end
+
+    it "does not quote numerics" do
+      expect(described_class.quote_value(19)).to eq("19")
+      expect(described_class.quote_value(19.5)).to eq("19.5")
+    end
+
+    it "does not quote booleans" do
+      expect(described_class.quote_value(true)).to eq("true")
+      expect(described_class.quote_value(false)).to eq("false")
+    end
+
+    it "does not quote cassandra uuids" do
+      expect(described_class.quote_value(Cassandra::Uuid.new("50554d6e-29bb-11e5-b345-feff819cdc9f"))).to eq("50554d6e-29bb-11e5-b345-feff819cdc9f")
+      expect(described_class.quote_value(Cassandra::TimeUuid.new("e3341564-9b5f-11ea-8fa9-315018f39af9"))).to eq("e3341564-9b5f-11ea-8fa9-315018f39af9")
+    end
+
+    it "quotes strings" do
+      expect(described_class.quote_value("some value")).to eq("'some value'")
+      expect(described_class.quote_value("some'value")).to eq("'some''value'")
+    end
+  end
+
+  describe "#assign" do
+    it "assigns the specified attributes" do
+      test_log = TestLog.new
+      test_log.assign(timestamp: "2016-11-01 12:00:00", username: "username")
+
+      expect(Time.parse("2016-11-01 12:00:00").utc.round(3)).to eq(test_log.timestamp)
+      expect(test_log.username).to eq("username")
+    end
+
+    it "raises an ArgumentError when re-assigning a key attribute" do
+      test_log = TestLog.create!(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      expect(test_log.persisted?).to eq(true)
+      expect { test_log.assign(date: Date.parse("2016-11-02")) }.to raise_error(ArgumentError)
+    end
+  end
+
+  describe "#attributes" do
+    it "returns the attributes as a hash" do
+      test_log = TestLog.new(timestamp: "2016-11-01 12:00:00", username: "username")
+
+      expect(test_log.attributes).to eq(
+        date: nil,
+        bucket: nil,
+        id: nil,
+        query: nil,
+        username: "username",
+        timestamp: Time.parse("2016-11-01 12:00:00").utc.round(3)
+      )
+    end
+  end
+
+  describe ".cast_value" do
+    it "casts string attributes" do
+      expect(TestRecord.new(text: "text").text).to eq("text")
+      expect(TestRecord.new(text: 1).text).to eq("1")
+    end
+
+    it "casts boolean attributes" do
+      expect(TestRecord.new(boolean: true).boolean).to eq(true)
+      expect(TestRecord.new(boolean: false).boolean).to eq(false)
+      expect(TestRecord.new(boolean: 1).boolean).to eq(true)
+      expect(TestRecord.new(boolean: 0).boolean).to eq(false)
+      expect(TestRecord.new(boolean: "1").boolean).to eq(true)
+      expect(TestRecord.new(boolean: "0").boolean).to eq(false)
+      expect(TestRecord.new(boolean: "true").boolean).to eq(true)
+      expect(TestRecord.new(boolean: "false").boolean).to eq(false)
+      expect { TestRecord.new(boolean: :other).boolean }.to raise_error(ArgumentError)
+    end
+
+    it "casts int attributes" do
+      expect(TestRecord.new(int: 1).int).to eq(1)
+      expect(TestRecord.new(int: "1").int).to eq(1)
+      expect(TestRecord.new(int: 1.0).int).to eq(1)
+      expect { TestRecord.new(int: :other).int }.to raise_error(TypeError)
+    end
+
+    it "casts bigint attributes" do
+      expect(TestRecord.new(bigint: 1).bigint).to eq(1)
+      expect(TestRecord.new(bigint: "1").bigint).to eq(1)
+      expect(TestRecord.new(bigint: 1.0).bigint).to eq(1)
+      expect { TestRecord.new(bigint: :other).int }.to raise_error(TypeError)
+    end
+
+    it "casts date attributes" do
+      expect(TestRecord.new(date: Date.new(2016, 11, 1)).date).to eq(Date.new(2016, 11, 1))
+      expect(TestRecord.new(date: "2016-11-01").date).to eq(Date.new(2016, 11, 1))
+      expect { TestRecord.new(date: :other).date }.to raise_error(ArgumentError)
+    end
+
+    it "casts timestamp attributes" do
+      expect(TestRecord.new(timestamp: Time.parse("2016-11-01 12:00:00")).timestamp).to eq(Time.parse("2016-11-01 12:00:00").utc.round(3))
+      expect(TestRecord.new(timestamp: "2016-11-01 12:00:00").timestamp).to eq(Time.parse("2016-11-01 12:00:00").utc.round(3))
+      expect(TestRecord.new(timestamp: Time.parse("2016-11-01 12:00:00").to_i).timestamp).to eq(Time.parse("2016-11-01 12:00:00").utc.round(3))
+      expect { TestRecord.new(timestamp: :other).timestamp }.to raise_error(ArgumentError)
+    end
+
+    it "casts timeuuid attributes" do
+      expect(TestRecord.new(timeuuid: Cassandra::TimeUuid.new("1ce29e82-b2ea-11e6-88fa-2971245f69e1")).timeuuid).to eq(Cassandra::TimeUuid.new("1ce29e82-b2ea-11e6-88fa-2971245f69e1"))
+      expect(TestRecord.new(timeuuid: "1ce29e82-b2ea-11e6-88fa-2971245f69e2").timeuuid).to eq(Cassandra::TimeUuid.new("1ce29e82-b2ea-11e6-88fa-2971245f69e2"))
+      expect(TestRecord.new(timeuuid: 38_395_057_947_756_324_226_486_198_980_982_041_059).timeuuid).to eq(Cassandra::TimeUuid.new(38_395_057_947_756_324_226_486_198_980_982_041_059))
+      expect { TestRecord.new(timeuuid: :other).timeuuid }.to raise_error(ArgumentError)
+    end
+
+    it "casts uuid attributes" do
+      expect(TestRecord.new(uuid: Cassandra::Uuid.new("b9af7b9b-9317-43b3-922e-fe303f5942c1")).uuid).to eq(Cassandra::Uuid.new("b9af7b9b-9317-43b3-922e-fe303f5942c1"))
+      expect(TestRecord.new(uuid: "b9af7b9b-9317-43b3-922e-fe303f5942c1").uuid).to eq(Cassandra::Uuid.new("b9af7b9b-9317-43b3-922e-fe303f5942c1"))
+      expect(TestRecord.new(uuid: 13_466_612_472_233_423_808_722_080_080_896_418_394).uuid).to eq(Cassandra::Uuid.new(13_466_612_472_233_423_808_722_080_080_896_418_394))
+      expect { TestRecord.new(uuid: :other).uuid }.to raise_error(ArgumentError)
+    end
+  end
+
+  describe "#save" do
+    it "returns false when validation fails" do
+      test_log = TestLog.new
+
+      expect(test_log.save).to eq(false)
+    end
+
+    it "does not persist the record when validation fails" do
+      test_log = TestLog.new
+
+      expect { test_log.save }.not_to(change { TestLog.count })
+    end
+
+    it "adds the errors when validation fails" do
+      test_log = TestLog.new
+      test_log.save
+
+      expect(test_log.errors[:timestamp]).to include("can't be blank")
+    end
+
+    it "persists the record" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      expect { test_log.save }.to change { TestLog.count }.by(1)
+      expect(test_log.persisted?).to eq(true)
+
+      reloaded_test_log = TestLog.where(date: test_log.date, bucket: test_log.bucket, id: test_log.id).first
+
+      expect(reloaded_test_log.attributes).to eq(test_log.attributes)
+    end
+
+    it "executes the hooks" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+      test_log.save
+
+      expect(test_log.date).to eq(Date.parse("2016-11-01"))
+      expect(test_log.username).to eq("username")
+      expect(test_log.bucket).to be_present
+      expect(test_log.id).to be_present
+    end
+  end
+
+  describe "#save!" do
+    it "raises an error if validation fails" do
+      test_log = TestLog.new
+
+      expect { test_log.save! }.to raise_error(CassandraStore::RecordInvalid)
+    end
+
+    it "does not persist the record if validation fails" do
+      test_log = TestLog.new
+
+      block = proc do
+        begin
+          test_log.save!
+        rescue StandardError
+          nil
+        end
+      end
+
+      expect(&block).not_to(change { TestLog.count })
+    end
+
+    it "returns true when the record can be persisted" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      block = proc do
+        begin
+          test_log.save!
+        rescue StandardError
+          nil
+        end
+      end
+
+      expect(&block).to(change { TestLog.count }.by(1))
+    end
+
+    it "persists the record" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      expect { test_log.save! }.to change { TestLog.count }.by(1)
+      expect(test_log.persisted?).to eq(true)
+
+      reloaded_test_log = TestLog.where(date: test_log.date, bucket: test_log.bucket, id: test_log.id).first
+
+      expect(reloaded_test_log.attributes).to eq(test_log.attributes)
+    end
+
+    it "executes the hooks" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+      test_log.save!
+
+      expect(test_log.date).to eq(Date.parse("2016-11-01"))
+      expect(test_log.username).to eq("username")
+      expect(test_log.bucket).to be_present
+      expect(test_log.id).to be_present
+    end
+  end
+
+  describe "#valid?" do
+    it "respects the validation context for create" do
+      test_log = TestLogWithContext.new
+      test_log.valid?
+
+      expect(test_log.errors[:username]).to include("can't be blank")
+    end
+
+    it "respects the validation context for update" do
+      test_log = TestLogWithContext.create!(username: "username", timestamp: Time.now)
+      test_log.username = nil
+      test_log.valid?
+
+      expect(test_log.errors[:username]).not_to include("can't be blank")
+      expect(test_log.errors[:query]).to include("can't be blank")
+    end
+  end
+
+  describe ".create" do
+    it "assigns the attributes" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      expect(test_log.timestamp).to eq(Time.parse("2016-11-01 12:00:00").utc.round(3))
+      expect(test_log.username).to eq("username")
+    end
+
+    it "delegates to save" do
+      allow_any_instance_of(TestLog).to receive(:save).and_raise("delegated")
+
+      expect { TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username") }.to raise_error("delegated")
+    end
+  end
+
+  describe ".create!" do
+    it "assigns the attributes" do
+      test_log = TestLog.create!(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      expect(test_log.timestamp).to eq(Time.parse("2016-11-01 12:00:00").utc.round(3))
+      expect(test_log.username).to eq("username")
+    end
+
+    it "delegates to save!" do
+      allow_any_instance_of(TestLog).to receive(:save!).and_raise("delegated")
+
+      expect { TestLog.create!(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username") }.to raise_error("delegated")
+    end
+  end
+
+  describe "#update" do
+    it "assigns the attributes" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+      test_log.update(username: "new username", timestamp: Time.parse("2016-11-02 12:00:00"))
+
+      expect(test_log.username).to eq("new username")
+      expect(test_log.timestamp).to eq(Time.parse("2016-11-02 12:00:00").utc.round(3))
+    end
+
+    it "delegates to save" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      allow(test_log).to receive(:save)
+
+      test_log.update(username: "new username", timestamp: Time.parse("2016-11-02 12:00:00"))
+
+      expect(test_log).to have_received(:save)
+    end
+
+    it "returns true when the update is successfull" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      expect(test_log.update(username: "new username", timestamp: Time.parse("2016-11-02 12:00:00"))).to eq(true)
+    end
+
+    it "returns false when the update fails" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      expect(test_log.update(timestamp: nil)).to eq(false)
+    end
+  end
+
+  describe "#update!" do
+    it "assigns the attributes" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+      test_log.update!(username: "new username", timestamp: Time.parse("2016-11-02 12:00:00"))
+
+      expect(test_log.username).to eq("new username")
+      expect(test_log.timestamp).to eq(Time.parse("2016-11-02 12:00:00").utc.round(3))
+    end
+
+    it "delegates to save!" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      allow(test_log).to receive(:save!)
+
+      test_log.update!(username: "new username", timestamp: Time.parse("2016-11-02 12:00:00"))
+
+      expect(test_log).to have_received(:save!)
+    end
+
+    it "returns true when the update is successfull" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"), username: "username")
+
+      expect(test_log.update!(username: "new username", timestamp: Time.parse("2016-11-02 12:00:00"))).to eq(true)
+    end
+  end
+
+  describe "#persisted" do
+    it "returns false if the record is not persisted" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      expect(test_log.persisted?).to eq(false)
+    end
+
+    it "returns true if the record is persisted" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      expect(test_log.persisted?).to eq(true)
+    end
+  end
+
+  describe "#new_record?" do
+    it "returns true if the record is not yet persisted" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      expect(test_log.new_record?).to eq(true)
+    end
+
+    it "returns false if the record is persisted" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      expect(test_log.new_record?).to eq(false)
+    end
+  end
+
+  describe "#delete" do
+    it "deletes the record" do
+      test_log1 = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"))
+      test_log2 = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      test_log1.delete
+
+      expect(TestLog.all.to_a).to eq([test_log2])
+    end
+  end
+
+  describe "#destroy" do
+    it "deletes the record and updates the destroyed info" do
+      test_log1 = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"))
+      test_log2 = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      test_log1.destroy
+
+      expect(TestLog.all.to_a).to eq([test_log2])
+      expect(test_log1.destroyed?).to eq(true)
+    end
+  end
+
+  describe "#destroyed?" do
+    it "returns false when the record was not yet destroyed" do
+      test_log = TestLog.new(timestamp: Time.parse("2016-11-01 12:00:00"))
+
+      expect(test_log.destroyed?).to eq(false)
+    end
+
+    it "returns true when the record was destroyed" do
+      test_log = TestLog.create(timestamp: Time.parse("2016-11-01 12:00:00"))
+      test_log.destroy
+
+      expect(test_log.destroyed?).to eq(true)
+    end
+  end
+
+  describe ".table_name" do
+    it "returns the table name" do
+      expect(TestLog.table_name).to eq("test_logs")
+    end
+  end
+
+  describe ".truncate_table" do
+    it "deletes all records" do
+      TestLog.create!(timestamp: Time.parse("2016-11-01 12:00:00"))
+      TestLog.create!(timestamp: Time.parse("2016-11-02 12:00:00"))
+
+      expect { TestLog.truncate_table }.to change { TestLog.count }.by(-2)
+    end
+  end
+
+  describe ".statement" do
+    it "inserts the specified placeholders and quotes the values" do
+      statement = TestLog.statement(
+        "SELECT * FROM table WHERE date = :date AND id = :id AND message = :message",
+        date: Date.parse("2016-12-06"),
+        id: 1,
+        message: "some'value"
+      )
+
+      expect(statement).to eq("SELECT * FROM table WHERE date = '2016-12-06' AND id = 1 AND message = 'some''value'")
+    end
+  end
+
+  describe ".cluster_execute" do
+    it "raises when a table is accessed without keyspace" do
+      expect { CassandraStore::Base.cluster_execute("SELECT * FROM test_logs") }.to raise_error(/No keyspace has been specified/)
+    end
+
+    it "executes the statement and returns the result" do
+      records = [
+        TestLog.create!(timestamp: Time.parse("2016-11-01 12:00:00")),
+        TestLog.create!(timestamp: Time.parse("2016-11-02 12:00:00"))
+      ]
+
+      expect(CassandraStore::Base.execute("SELECT * FROM cassandra_store.test_logs", consistency: :all).map { |row| row["id"] }.to_set).to eq(records.map(&:id).to_set)
+    end
+  end
+
+  describe ".execute" do
+    it "executes the statement and returns the result" do
+      records = [
+        TestLog.create!(timestamp: Time.parse("2016-11-01 12:00:00")),
+        TestLog.create!(timestamp: Time.parse("2016-11-02 12:00:00"))
+      ]
+
+      expect(TestLog.execute("SELECT * FROM test_logs", consistency: :all).map { |row| row["id"] }.to_set).to eq(records.map(&:id).to_set)
+    end
+  end
+
+  describe ".execute_batch" do
+    it "executes the statements" do
+      records = [
+        TestLog.create!(timestamp: Time.parse("2016-11-01 12:00:00")),
+        TestLog.create!(timestamp: Time.parse("2016-11-02 12:00:00"))
+      ]
+
+      batch = [
+        "DELETE FROM test_logs WHERE date = '#{records[0].date.strftime("%F")}' AND bucket = #{records[0].bucket} AND id = #{records[0].id}",
+        "DELETE FROM test_logs WHERE date = '#{records[1].date.strftime("%F")}' AND bucket = #{records[1].bucket} AND id = #{records[1].id}"
+      ]
+
+      expect { TestLog.execute_batch(batch, consistency: :all) }.to change { TestLog.count }.by(-2)
+    end
+  end
+
+  describe "#callbacks" do
+    let(:temp_log) do
+      Class.new(TestLog) do
+        def self.table_name
+          "test_logs"
+        end
+
+        def called_callbacks
+          @called_callbacks ||= []
+        end
+
+        def reset_called_callbacks
+          @called_callbacks = []
+        end
+
+        before_validation { called_callbacks << :before_validation }
+        after_validation { called_callbacks << :after_validation }
+        before_save { called_callbacks << :before_save }
+        after_save { called_callbacks << :after_save }
+        before_create { called_callbacks << :before_create }
+        after_create { called_callbacks << :after_create }
+        before_update { called_callbacks << :before_update }
+        after_update { called_callbacks << :after_update }
+        before_destroy { called_callbacks << :before_destroy }
+        after_destroy { called_callbacks << :after_destroy }
+      end
+    end
+
+    it "executes the correct callbacks in the correct order on create" do
+      record = temp_log.create!(timestamp: Time.now)
+
+      expect(record.called_callbacks).to eq([:before_validation, :after_validation, :before_save, :before_create, :after_create, :after_save])
+    end
+
+    it "executes the correct callbacks in the correct order on update" do
+      record = temp_log.create!(timestamp: Time.now)
+      record.reset_called_callbacks
+      record.save
+
+      expect(record.called_callbacks).to eq([:before_validation, :after_validation, :before_save, :before_update, :after_update, :after_save])
+    end
+
+    it "executes the correct callbacks in the correct order on destroy" do
+      record = temp_log.create!(timestamp: Time.now)
+      record.reset_called_callbacks
+      record.destroy
+
+      expect(record.called_callbacks).to eq([:before_destroy, :after_destroy])
+    end
+  end
+
+  describe "#validate!" do
+    it "raises CassandraStore::RecordInvalid if validation fails" do
+      TestLog.new(timestamp: Time.now).validate!
+
+      expect { TestLog.new.validate! }.to raise_error(CassandraStore::RecordInvalid)
+    end
+  end
+
+  describe "dirty attributes" do
+    let(:timestamp) { Time.now }
+
+    it "returns true for dirty attributes" do
+      test_log = TestLog.new(timestamp: timestamp, username: "username")
+
+      expect(test_log.timestamp_changed?).to eq(true)
+      expect(test_log.username_changed?).to eq(true)
+
+      expect(test_log.changes).to eq("timestamp" => [nil, timestamp.utc.round(3)], "username" => [nil, "username"])
+    end
+
+    it "resets the dirty attributes after save" do
+      test_log = TestLog.new(timestamp: timestamp, username: "username")
+      test_log.save!
+
+      expect(test_log.timestamp_changed?).to eq(false)
+      expect(test_log.username_changed?).to eq(false)
+
+      expect(test_log.changes).to be_blank
+    end
+  end
+
+  describe ".key_columns" do
+    it "returns the key columns" do
+      expect(TestLog.key_columns).to eq(
+        date: { type: :date, partition_key: true, clustering_key: false },
+        bucket: { type: :int, partition_key: true, clustering_key: false },
+        id: { type: :timeuuid, partition_key: false, clustering_key: true }
+      )
+    end
+  end
+
+  describe ".clustering_key_columns" do
+    it "returns the clustering key columns" do
+      expect(TestLog.clustering_key_columns).to eq(id: { type: :timeuuid, partition_key: false, clustering_key: true })
+    end
+  end
+
+  describe ".parition_key_columns" do
+    it "returns the partition key columns" do
+      expect(TestLog.partition_key_columns).to eq(
+        date: { type: :date, partition_key: true, clustering_key: false },
+        bucket: { type: :int, partition_key: true, clustering_key: false }
+      )
+    end
+  end
+
+  describe "#key_values" do
+    it "returns the values of the keys" do
+      date = Date.today
+      bucket = 1
+      id = Cassandra::TimeUuid::Generator.new.at(Time.now)
+
+      expect(TestLog.new(date: date, bucket: bucket, id: id).key_values).to eq([date, bucket, id])
+    end
+  end
+
+  describe "equality" do
+    let(:generator) { Cassandra::TimeUuid::Generator.new }
+    let(:id) { generator.at(Time.parse("2017-01-01 12:00:00")) }
+
+    it "returns true if the records have the same key values" do
+      record1 = TestLog.new(date: Date.parse("2017-01-01"), bucket: 1, id: id, username: "username1")
+      record2 = TestLog.new(date: Date.parse("2017-01-01"), bucket: 1, id: id, username: "username2")
+
+      expect(record1).to eq(record2)
+    end
+
+    it "returns false if auto generated keys are not the same" do
+      record1 = TestLog.new(date: Date.parse("2017-01-01"), bucket: 1, id: generator.at(Time.parse("2017-01-01 12:00:00")))
+      record2 = TestLog.new(date: Date.parse("2017-01-01"), bucket: 1, id: generator.at(Time.parse("2017-01-01 12:00:00")))
+
+      expect(record1).not_to eq(record2)
+    end
+
+    it "returns false if key values are not the same" do
+      record1 = TestLog.new(date: Date.parse("2017-01-01"), bucket: 1, id: id)
+      record2 = TestLog.new(date: Date.parse("2017-01-01"), bucket: 2, id: id)
+
+      expect(record1).not_to eq(record2)
+    end
+  end
+
+  describe ".generate_uuid" do
+    it "generates a uuid" do
+      expect(TestLog.new.send(:generate_uuid)).to be_instance_of(Cassandra::Uuid)
+      expect(TestLog.new.send(:generate_uuid).to_s).to match(/\A[0-9a-f]+-[0-9a-f]+-[0-9a-f]+-[0-9a-f]+-[0-9a-f]+\z/)
+    end
+  end
+
+  describe ".generate_timeuuid" do
+    it "generates a timeuuid" do
+      expect(TestLog.new.send(:generate_timeuuid)).to be_instance_of(Cassandra::TimeUuid)
+      expect(TestLog.new.send(:generate_timeuuid).to_s).to match(/\A[0-9a-f]+-[0-9a-f]+-[0-9a-f]+-[0-9a-f]+-[0-9a-f]+\z/)
+    end
+
+    it "respects a passed a timestamp" do
+      timestamp = Time.parse("2020-05-20 12:00:00")
+
+      expect(TestLog.new.send(:generate_timeuuid, timestamp).to_time.utc.round).to eq(timestamp.utc.round)
+    end
+  end
+end

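The specs rely on a TestLog model defined in data/spec/spec_helper.rb, which is part of this release but not shown in the hunks above. Based on the key_columns, attributes, and hook expectations, its definition presumably looks roughly like the following sketch; the column options mirror the expectations, while the callback bodies and the bucket default are guesses:

# Hypothetical reconstruction of TestLog from spec_helper.rb; not the gem's
# verbatim code.
class TestLog < CassandraStore::Base
  column :date, :date, partition_key: true
  column :bucket, :int, partition_key: true
  column :id, :timeuuid, clustering_key: true
  column :query, :text
  column :username, :text
  column :timestamp, :timestamp

  validates :timestamp, presence: true

  before_create do
    self.date = timestamp.to_date          # matches the "executes the hooks" expectations
    self.bucket = rand(4)                  # bucket default is a guess
    self.id = generate_timeuuid(timestamp)
  end
end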