logstash-output-cassandra 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CONTRIBUTORS +7 -0
- data/Gemfile +4 -0
- data/LICENSE +218 -0
- data/README.md +148 -0
- data/lib/logstash/outputs/cassandra.rb +164 -0
- data/lib/logstash/outputs/cassandra/backoff_retry_policy.rb +65 -0
- data/lib/logstash/outputs/cassandra/buffer.rb +125 -0
- data/lib/logstash/outputs/cassandra/event_parser.rb +161 -0
- data/lib/logstash/outputs/cassandra/safe_submitter.rb +118 -0
- data/logstash-output-cassandra.gemspec +35 -0
- data/spec/cassandra_spec_helper.rb +14 -0
- data/spec/integration/outputs/cassandra_spec.rb +115 -0
- data/spec/integration/outputs/integration_helper.rb +91 -0
- data/spec/unit/outputs/backoff_retry_policy_spec.rb +131 -0
- data/spec/unit/outputs/buffer_spec.rb +119 -0
- data/spec/unit/outputs/cassandra_spec.rb +5 -0
- data/spec/unit/outputs/event_parser_spec.rb +304 -0
- data/spec/unit/outputs/safe_submitter_spec.rb +201 -0
- metadata +266 -0
# encoding: utf-8
require_relative '../../cassandra_spec_helper'
require 'logstash/outputs/cassandra/event_parser'

# Unit specs for LogStash::Outputs::Cassandra::EventParser — covers table name
# resolution, filter transforms (configured statically or supplied on the event
# itself), cassandra type mapping, hint based conversion and the
# ignore_bad_values defaulting behaviour.
RSpec.describe LogStash::Outputs::Cassandra::EventParser do
  let(:sut) { LogStash::Outputs::Cassandra::EventParser }
  let(:default_opts) {
    logger = double
    allow(logger).to(receive(:debug))
    # NOTE: no explicit `return` here — `return` inside a `let` block is
    # fragile (it can raise LocalJumpError when RSpec evaluates the block
    # later); the hash being the last expression is all that is needed.
    {
      'logger' => logger,
      'table' => 'dummy',
      'filter_transform_event_key' => nil,
      'filter_transform' => [],
      'hints' => {},
      'ignore_bad_values' => false
    }
  }
  let(:sample_event) { LogStash::Event.new('message' => 'sample message here') }

  describe 'table name parsing' do
    it 'leaves regular table names unchanged' do
      sut_instance = sut.new(default_opts.update({ 'table' => 'simple' }))

      action = sut_instance.parse(sample_event)

      expect(action['table']).to(eq('simple'))
    end

    it 'allows for string expansion in table names' do
      sut_instance = sut.new(default_opts.update({ 'table' => '%{[a_field]}' }))
      sample_event['a_field'] = 'a_value'

      action = sut_instance.parse(sample_event)

      expect(action['table']).to(eq('a_value'))
    end
  end

  describe 'filter transforms' do
    describe 'from config' do
      describe 'malformed configurations' do
        it 'fails if the transform has no event_key setting' do
          expect { sut.new(default_opts.update({ 'filter_transform' => [{ 'column_name' => '' }] })) }.to raise_error(/item is incorrectly configured/)
        end

        it 'fails if the transform has no column_name setting' do
          expect { sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => '' }] })) }.to raise_error(/item is incorrectly configured/)
        end
      end

      describe 'properly configured' do
        it 'maps the event key to the column' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column' }] }))
          sample_event['a_field'] = 'a_value'

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_column']).to(eq('a_value'))
        end

        it 'works with multiple filter transforms' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column' }, { 'event_key' => 'another_field', 'column_name' => 'a_different_column' }] }))
          sample_event['a_field'] = 'a_value'
          sample_event['another_field'] = 'a_second_value'

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_column']).to(eq('a_value'))
          expect(action['data']['a_different_column']).to(eq('a_second_value'))
        end

        it 'allows for string expansion in event keys' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => '%{[pointer_to_another_field]}', 'column_name' => 'a_column' }] }))
          sample_event['pointer_to_another_field'] = 'another_field'
          sample_event['another_field'] = 'a_value'

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_column']).to(eq('a_value'))
        end

        it 'allows for string expansion only filters for things like date string formats' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => '%{+yyyyMMddHHmm}', 'expansion_only' => true, 'column_name' => 'a_column' }] }))
          expected_value = Time.now.getutc.strftime('%Y%m%d%H%M')

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_column']).to(eq(expected_value))
        end

        it 'allows for string expansion in column names' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => '%{[pointer_to_another_field]}' }] }))
          sample_event['a_field'] = 'a_value'
          sample_event['pointer_to_another_field'] = 'a_different_column'

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_different_column']).to(eq('a_value'))
        end
      end

      describe 'cassandra type mapping' do
        [
          { :name => 'timestamp', :type => ::Cassandra::Types::Timestamp, :value => Time::parse('1979-07-27 00:00:00 +0300') },
          { :name => 'timestamp', :type => ::Cassandra::Types::Timestamp, :value => '1982-05-04 00:00:00 +0300', expected: Time::parse('1982-05-04 00:00:00 +0300') },
          { :name => 'timestamp', :type => ::Cassandra::Types::Timestamp, :value => 1457606758, expected: Time.at(1457606758) },
          { :name => 'inet', :type => ::Cassandra::Types::Inet, :value => '0.0.0.0' },
          { :name => 'float', :type => ::Cassandra::Types::Float, :value => '10.15' },
          { :name => 'varchar', :type => ::Cassandra::Types::Varchar, :value => 'a varchar' },
          { :name => 'text', :type => ::Cassandra::Types::Text, :value => 'some text' },
          { :name => 'blob', :type => ::Cassandra::Types::Blob, :value => '12345678' },
          { :name => 'ascii', :type => ::Cassandra::Types::Ascii, :value => 'some ascii' },
          { :name => 'bigint', :type => ::Cassandra::Types::Bigint, :value => '100' },
          { :name => 'counter', :type => ::Cassandra::Types::Counter, :value => '15' },
          { :name => 'int', :type => ::Cassandra::Types::Int, :value => '123' },
          { :name => 'varint', :type => ::Cassandra::Types::Varint, :value => '345' },
          { :name => 'boolean', :type => ::Cassandra::Types::Boolean, :value => 'true' },
          { :name => 'decimal', :type => ::Cassandra::Types::Decimal, :value => '0.12E2' },
          { :name => 'double', :type => ::Cassandra::Types::Double, :value => '123.65' },
          { :name => 'timeuuid', :type => ::Cassandra::Types::Timeuuid, :value => '00000000-0000-0000-0000-000000000000' }
        ].each { |mapping|
          # NOTE: this is not the best test there is, but it is the best / simplest I could think of :/
          # (comparing via to_s because the converted driver types do not compare
          # directly to the raw ruby input values)
          it "properly maps #{mapping[:name]} to #{mapping[:type]}" do
            sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column', 'cassandra_type' => mapping[:name] }] }))
            sample_event['a_field'] = mapping[:value]

            action = sut_instance.parse(sample_event)

            expected_value = mapping.has_key?(:expected) ? mapping[:expected] : mapping[:value]
            expect(action['data']['a_column'].to_s).to(eq(expected_value.to_s))
          end
        }

        it 'properly maps sets to their specific set types' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column', 'cassandra_type' => 'set<int>' }] }))
          original_value = [ 1, 2, 3 ]
          sample_event['a_field'] = original_value

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_column'].to_a).to(eq(original_value))
        end

        it 'properly maps sets to their specific set types for type which also require actual conversion' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column', 'cassandra_type' => 'set<timeuuid>' }] }))
          original_value = %w(00000000-0000-0000-0000-000000000000 00000000-0000-0000-0000-000000000001 00000000-0000-0000-0000-000000000002)
          sample_event['a_field'] = original_value

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_column'].size).to(eq(original_value.size))
          action['data']['a_column'].to_a.each { |item|
            expect(original_value).to(include(item.to_s))
          }
        end

        it 'allows for string expansion in cassandra types' do
          sut_instance = sut.new(default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column', 'cassandra_type' => '%{[pointer_to_a_field]}' }] }))
          sample_event['a_field'] = '123'
          sample_event['pointer_to_a_field'] = 'int'

          action = sut_instance.parse(sample_event)

          expect(action['data']['a_column']).to(eq(123))
        end

        it 'fails in case of an unknown type' do
          options = default_opts.update({ 'filter_transform' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column', 'cassandra_type' => 'what?!' }] })
          sut_instance = sut.new(options)
          sample_event['a_field'] = 'a_value'
          expect(options['logger']).to(receive(:error))

          expect { sut_instance.parse(sample_event) }.to raise_error(/Cannot convert/)
        end
      end
    end

    describe 'from event' do
      it 'obtains the filter transform from the event if defined' do
        sut_instance = sut.new(default_opts.update({ 'filter_transform_event_key' => 'an_event_filter' }))
        sample_event['a_field'] = 'a_value'
        sample_event['an_event_filter'] = [{ 'event_key' => 'a_field', 'column_name' => 'a_column' }]

        action = sut_instance.parse(sample_event)

        expect(action['data']['a_column']).to(eq('a_value'))
      end

      it 'obtains the filter transform from the event even when it is in the metadata' do
        sut_instance = sut.new(default_opts.update({ 'filter_transform_event_key' => '[@metadata][the_filter]' }))
        sample_event['a_field'] = 'a_value'
        sample_event['@metadata'] = { 'the_filter' => [{ 'event_key' => 'a_field', 'column_name' => 'a_column' }] }

        action = sut_instance.parse(sample_event)

        expect(action['data']['a_column']).to(eq('a_value'))
      end
    end
  end

  describe 'hints' do
    it 'removes fields starting with @' do
      sut_instance = sut.new(default_opts.update({ 'hints' => {} }))
      sample_event['leave'] = 'a_value'
      sample_event['@remove'] = 'another_value'

      action = sut_instance.parse(sample_event)

      expect(action['data']['leave']).to(eq('a_value'))
      expect(action['data']).not_to(include('@remove'))
    end

    it 'does not attempt to change items with no hints' do
      sut_instance = sut.new(default_opts.update({ 'hints' => {} }))
      expected_value = [ 1, 2, 3 ]
      sample_event['no_hint_here'] = expected_value

      action = sut_instance.parse(sample_event)

      # equal() checks object identity — the value must be passed through untouched
      expect(action['data']['no_hint_here']).to(equal(expected_value))
    end

    it 'converts items with hints' do
      sut_instance = sut.new(default_opts.update({ 'hints' => { 'a_set' => 'set<int>', 'an_int' => 'int' } }))
      original_set = [ 1, 2, 3 ]
      sample_event['a_set'] = original_set
      sample_event['an_int'] = '123'

      action = sut_instance.parse(sample_event)

      expect(action['data']['a_set']).to(be_a(Set))
      expect(action['data']['a_set'].to_a).to(eql(original_set))
      expect(action['data']['an_int']).to(eql(123))
    end

    it 'fails for unknown hint types' do
      options = default_opts.update({ 'hints' => { 'a_field' => 'not_a_real_type' } })
      sut_instance = sut.new(options)
      expect(options['logger']).to(receive(:error))

      sample_event['a_field'] = 'a value'

      expect { sut_instance.parse(sample_event) }.to raise_error(/Cannot convert/)
    end

    it 'fails for unsuccessful hint conversion' do
      options = default_opts.update({ 'hints' => { 'a_field' => 'int' } })
      expect(options['logger']).to(receive(:error))

      sut_instance = sut.new(options)

      sample_event['a_field'] = 'i am not an int!!!'
      expect { sut_instance.parse(sample_event) }.to raise_error(/Cannot convert/)
    end
  end

  describe 'ignore_bad_values is turned on' do
    [
      { :name => 'timestamp', :value => 'i dont have to_time', :expected => Time::parse('1970-01-01 00:00:00 +0000') },
      { :name => 'inet', :value => 'i am not an inet address', :expected => '0.0.0.0' },
      { :name => 'float', :value => 'i am not a float', :expected => 0.0 },
      { :name => 'bigint', :value => 'i am not a bigint', :expected => 0 },
      { :name => 'counter', :value => 'i am not a counter', :expected => 0 },
      { :name => 'int', :value => 'i am not a int', :expected => 0 },
      { :name => 'varint', :value => 'i am not a varint', :expected => 0 },
      { :name => 'double', :value => 'i am not a double', :expected => 0.0 },
      { :name => 'timeuuid', :value => 'i am not a timeuuid', :expected => '00000000-0000-0000-0000-000000000000' }
    ].each { |mapping|
      # NOTE: this is not the best test there is, but it is the best / simplest I could think of :/
      it "properly defaults #{mapping[:name]}" do
        options = default_opts.update({ 'ignore_bad_values' => true, 'hints' => { 'a_field' => mapping[:name] } })
        expect(options['logger']).to(receive(:warn))
        sut_instance = sut.new(options)
        sample_event['a_field'] = mapping[:value]

        action = sut_instance.parse(sample_event)

        expect(action['data']['a_field'].to_s).to(eq(mapping[:expected].to_s))
      end
    }

    it 'properly default sets' do
      options = default_opts.update({ 'ignore_bad_values' => true, 'hints' => { 'a_field' => 'set<float>' } })
      expect(options['logger']).to(receive(:warn))
      sut_instance = sut.new(options)
      sample_event['a_field'] = 'i am not a set'

      action = sut_instance.parse(sample_event)

      expect(action['data']['a_field'].size).to(be(0))
      expect(action['data']['a_field']).to(be_an_instance_of(Set))
    end

    it 'raises an ArgumentError in case we try to default a type we dont know' do
      options = default_opts.update({ 'ignore_bad_values' => true, 'hints' => { 'a_field' => 'map<float>' } })
      sut_instance = sut.new(options)
      sample_event['a_field'] = 'i am not a set'

      expect { sut_instance.parse(sample_event) }.to raise_error ArgumentError
    end
  end
end
# encoding: utf-8
require_relative '../../cassandra_spec_helper'
require 'logstash/outputs/cassandra/safe_submitter'

# Unit specs for LogStash::Outputs::Cassandra::SafeSubmitter — covers cluster /
# session initialization (including retry policy wiring) and query
# preparation, caching, submission and retry behaviour.
RSpec.describe LogStash::Outputs::Cassandra::SafeSubmitter do
  let(:sut) { LogStash::Outputs::Cassandra::SafeSubmitter }
  let(:default_options) {
    logger = double
    allow(logger).to(receive(:debug))
    allow(logger).to(receive(:error))
    {
      'logger' => logger,
      'cassandra' => double,
      'username' => 'a user',
      'password' => 'a password',
      'protocol_version' => 3,
      'hosts' => 'some host',
      'port' => 9042,
      'consistency' => 'one',
      'request_timeout' => 10,
      'retry_policy' => { 'type' => 'default' },
      'concrete_retry_policy' => ::Cassandra::Retry::Policies::Default,
      'keyspace' => 'the final frontier'
    }
  }

  # Stubs the cassandra driver entry point so that constructing a
  # SafeSubmitter both verifies the expected cluster options and hands the
  # specs a session double to set further expectations on.
  def setup_session_double(options)
    session_double = double
    cluster_double = double
    expect(cluster_double).to(receive(:connect)).with(options['keyspace']).and_return(session_double)
    expect(options['cassandra']).to(receive(:cluster).with(
      username: options['username'],
      password: options['password'],
      protocol_version: options['protocol_version'],
      hosts: options['hosts'],
      port: options['port'],
      consistency: options['consistency'].to_sym,
      timeout: options['request_timeout'],
      retry_policy: options['concrete_retry_policy'],
      logger: options['logger']
    )).and_return(cluster_double)
    { :session_double => session_double }
  end

  describe 'init' do
    it 'properly inits the cassandra session' do
      setup_session_double(default_options)

      sut.new(default_options)
    end

    [
      { :setting => { 'type' => 'default' }, :concrete_retry_policy => ::Cassandra::Retry::Policies::Default },
      { :setting => { 'type' => 'downgrading_consistency' }, :concrete_retry_policy => ::Cassandra::Retry::Policies::DowngradingConsistency },
      { :setting => { 'type' => 'failthrough' }, :concrete_retry_policy => ::Cassandra::Retry::Policies::Fallthrough },
      { :setting => { 'type' => 'backoff', 'backoff_type' => '**', 'backoff_size' => 2, 'retry_limit' => 10 },
        :concrete_retry_policy => ::Cassandra::Retry::Policies::Backoff }
    ].each { |mapping|
      it "supports the #{mapping[:concrete_retry_policy]} retry policy by passing #{mapping[:setting]['type']} as the retry_policy" do
        options = default_options.merge({ 'retry_policy' => mapping[:setting], 'concrete_retry_policy' => mapping[:concrete_retry_policy] })
        setup_session_double(options)

        sut.new(options)
      end
    }

    it 'properly initializes the backoff retry policy' do
      retry_policy_config = { 'type' => 'backoff', 'backoff_type' => '**', 'backoff_size' => 2, 'retry_limit' => 10 }
      expected_policy = double
      options = default_options.merge({ 'retry_policy' => retry_policy_config, 'concrete_retry_policy' => expected_policy })
      expect(::Cassandra::Retry::Policies::Backoff).to(receive(:new).with({
        'backoff_type' => options['retry_policy']['backoff_type'], 'backoff_size' => options['retry_policy']['backoff_size'],
        'retry_limit' => options['retry_policy']['retry_limit'], 'logger' => options['logger']}).and_return(expected_policy))
      setup_session_double(options)

      sut.new(options)
    end

    it 'fails if the retry policy is unknown' do
      options = default_options.merge({ 'retry_policy' => 'bad policy' })

      expect { sut.new(options) }.to(raise_error(ArgumentError))
    end
  end

  describe 'execution' do
    let(:one_action) {{
      'table' => 'a_table',
      'data' => {
        'a_column' => 'a_value',
        'another_column' => 'another_value'
      }
    }}
    let(:expected_query_for_one_action) { "INSERT INTO a_table (a_column, another_column)\nVALUES (?, ?)" }
    let(:another_action) {{
      'table' => 'another_table',
      'data' => {
        'a_column' => 'a_value',
        'another_column' => 'another_value',
        'a_third_column' => 'another_value'
      }
    }}
    let(:expected_query_for_another_action) { "INSERT INTO another_table (a_column, another_column, a_third_column)\nVALUES (?, ?, ?)" }

    # A future double which the submitter is expected to join and register a
    # failure callback on.
    def generate_future_double
      future_double = double
      expect(future_double).to(receive(:join))
      expect(future_double).to(receive(:on_failure))
      future_double
    end

    it 'prepares and executes the query' do
      doubles = setup_session_double(default_options)
      expect(doubles[:session_double]).to(receive(:prepare).with(expected_query_for_one_action)).and_return('eureka')
      expect(doubles[:session_double]).to(receive(:execute_async).with('eureka', :arguments => one_action['data'].values)).and_return(generate_future_double)
      sut_instance = sut.new(default_options)

      sut_instance.submit([one_action])
    end

    it 'caches the generated query' do
      doubles = setup_session_double(default_options)
      expect(doubles[:session_double]).to(receive(:prepare).with(expected_query_for_one_action).once).and_return('eureka')
      2.times {
        expect(doubles[:session_double]).to(receive(:execute_async).with('eureka', :arguments => one_action['data'].values)).and_return(generate_future_double)
      }
      sut_instance = sut.new(default_options)

      sut_instance.submit([one_action, one_action])
    end

    it 'does not confuse between a new query and cached queries' do
      doubles = setup_session_double(default_options)
      expect(doubles[:session_double]).to(receive(:prepare).with(expected_query_for_one_action).once).and_return('eureka')
      expect(doubles[:session_double]).to(receive(:prepare).with(expected_query_for_another_action).once).and_return('great scott')
      expect(doubles[:session_double]).to(receive(:execute_async).with('eureka', :arguments => one_action['data'].values)).and_return(generate_future_double)
      expect(doubles[:session_double]).to(receive(:execute_async).with('great scott', :arguments => another_action['data'].values)).and_return(generate_future_double)
      sut_instance = sut.new(default_options)

      sut_instance.submit([one_action, another_action])
    end

    it 'logs and skips failed query preps' do
      setup_session_double(default_options)
      sut_instance = sut.new(default_options)
      expect(sut_instance).to(receive(:get_query).and_raise(ArgumentError))
      expect(default_options['logger']).to(receive(:error))

      expect { sut_instance.submit([one_action]) }.to_not raise_error
    end

    it 'logs and skips queries which failed during send' do
      setup_session_double(default_options)
      sut_instance = sut.new(default_options)
      expect(sut_instance).to(receive(:get_query).and_return(double))
      expect(sut_instance).to(receive(:execute_async).and_raise(ArgumentError))
      expect(default_options['logger']).to(receive(:error))

      expect { sut_instance.submit([one_action]) }.to_not raise_error
    end

    it 'does not retry queries which failed to execute in case the retry policy is not backoff' do
      doubles = setup_session_double(default_options)
      expect(doubles[:session_double]).to(receive(:prepare).and_return('great scott'))
      # setup a fail once execution
      fail_on_join_future = Object.new
      def fail_on_join_future.on_failure(&block)
        @block = block
      end
      def fail_on_join_future.join
        @block.call('oh boy...')
      end
      # .once — a non-backoff policy must not resubmit the failed query
      expect(doubles[:session_double]).to(receive(:execute_async).with('great scott', :arguments => another_action['data'].values).once).and_return(fail_on_join_future)
      sut_instance = sut.new(default_options)

      sut_instance.submit([another_action])
    end

    it 'retries queries which failed to execute' do
      options = default_options.merge({ 'retry_policy' => { 'type' => 'backoff', 'backoff_type' => '**', 'backoff_size' => 2, 'retry_limit' => 10 },
                                        'concrete_retry_policy' => ::Cassandra::Retry::Policies::Backoff })
      doubles = setup_session_double(options)
      expect(doubles[:session_double]).to(receive(:prepare).and_return('eureka'))
      expect(doubles[:session_double]).to(receive(:prepare).and_return('great scott'))
      expect(doubles[:session_double]).to(receive(:execute_async).with('eureka', :arguments => one_action['data'].values)).and_return(generate_future_double)
      # setup a fail once execution
      fail_on_join_future = Object.new
      def fail_on_join_future.on_failure(&block)
        @block = block
      end
      def fail_on_join_future.join
        @block.call('oh boy...')
      end
      # first attempt fails on join, the retry must succeed
      expect(doubles[:session_double]).to(receive(:execute_async).with('great scott', :arguments => another_action['data'].values)).and_return(fail_on_join_future)
      expect(doubles[:session_double]).to(receive(:execute_async).with('great scott', :arguments => another_action['data'].values)).and_return(generate_future_double)
      sut_instance = sut.new(options)

      sut_instance.submit([one_action, another_action])
    end
  end
end