fluent-plugin-kinesis 0.4.1 → 1.0.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/.gitignore +13 -18
  3. data/.travis.yml +9 -9
  4. data/CHANGELOG.md +9 -0
  5. data/CONTRIBUTORS.txt +1 -1
  6. data/Gemfile +12 -9
  7. data/LICENSE.txt +39 -201
  8. data/Makefile +40 -0
  9. data/NOTICE.txt +1 -1
  10. data/README-v0.4.md +348 -0
  11. data/README.md +398 -183
  12. data/Rakefile +20 -14
  13. data/benchmark/dummer.conf +13 -0
  14. data/benchmark/firehose.conf +24 -0
  15. data/benchmark/producer.conf +28 -0
  16. data/benchmark/streams.conf +24 -0
  17. data/fluent-plugin-kinesis.gemspec +34 -23
  18. data/gemfiles/Gemfile.fluentd-0.10.58 +20 -0
  19. data/lib/fluent/plugin/kinesis_helper.rb +30 -0
  20. data/lib/fluent/plugin/kinesis_helper/api.rb +164 -0
  21. data/lib/fluent/plugin/kinesis_helper/class_methods.rb +120 -0
  22. data/lib/fluent/plugin/kinesis_helper/client.rb +36 -0
  23. data/lib/fluent/plugin/kinesis_helper/credentials.rb +51 -0
  24. data/lib/fluent/plugin/kinesis_helper/error.rb +38 -0
  25. data/lib/fluent/plugin/kinesis_helper/format.rb +85 -0
  26. data/lib/fluent/plugin/kinesis_helper/initialize.rb +58 -0
  27. data/lib/fluent/plugin/kinesis_helper/kpl.rb +81 -0
  28. data/lib/fluent/plugin/out_kinesis.rb +13 -11
  29. data/lib/fluent/plugin/out_kinesis_firehose.rb +44 -0
  30. data/lib/fluent/plugin/out_kinesis_producer.rb +38 -0
  31. data/lib/fluent/plugin/out_kinesis_streams.rb +47 -0
  32. data/lib/fluent/plugin/patched_detach_process_impl.rb +103 -0
  33. data/lib/fluent_plugin_kinesis/version.rb +17 -0
  34. data/lib/kinesis_producer.rb +24 -0
  35. data/lib/kinesis_producer/binary.rb +10 -0
  36. data/lib/kinesis_producer/daemon.rb +238 -0
  37. data/lib/kinesis_producer/library.rb +122 -0
  38. data/lib/kinesis_producer/protobuf/config.pb.rb +66 -0
  39. data/lib/kinesis_producer/protobuf/messages.pb.rb +151 -0
  40. data/lib/kinesis_producer/tasks/binary.rake +73 -0
  41. metadata +196 -36
  42. data/lib/fluent/plugin/version.rb +0 -16
  43. data/test/helper.rb +0 -32
  44. data/test/plugin/test_out_kinesis.rb +0 -641
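The file list above shows the main restructuring in 1.0.0: the single out_kinesis output of 0.4.x is joined by dedicated out_kinesis_streams.rb, out_kinesis_firehose.rb, and out_kinesis_producer.rb plugins, a kinesis_helper/ module tree, and a bundled Kinesis Producer Library wrapper under lib/kinesis_producer/. As a rough orientation only, a 1.0.0 match section might look like the sketch below; the output type names are assumed to follow the new plugin file names, and the note about delivery_stream_name for the Firehose output is likewise an assumption, so data/README.md in this diff remains the authoritative reference for option names.

  # Minimal sketch, assuming the 1.0.0 output types are named after the new plugin files
  # (kinesis_streams, kinesis_firehose, kinesis_producer).
  # The Firehose output presumably takes delivery_stream_name instead of stream_name.
  <match your.tag.**>
    type kinesis_streams
    stream_name test_stream
    region us-east-1
    aws_key_id YOUR_AWS_KEY_ID
    aws_sec_key YOUR_AWS_SECRET_KEY
  </match>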
data/lib/fluent/plugin/version.rb +0 -16
@@ -1,16 +0,0 @@
- # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License"). You
- # may not use this file except in compliance with the License. A copy of
- # the License is located at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # or in the "license" file accompanying this file. This file is
- # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
- # ANY KIND, either express or implied. See the License for the specific
- # language governing permissions and limitations under the License.
-
- module FluentPluginKinesis
- VERSION = '0.4.1'
- end
data/test/helper.rb +0 -32
@@ -1,32 +0,0 @@
- # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License"). You
- # may not use this file except in compliance with the License. A copy of
- # the License is located at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # or in the "license" file accompanying this file. This file is
- # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
- # ANY KIND, either express or implied. See the License for the specific
- # language governing permissions and limitations under the License.
-
- require 'rubygems'
- require 'bundler'
- require 'stringio'
- begin
- Bundler.setup(:default, :development)
- rescue Bundler::BundlerError => e
- $stderr.puts e.message
- $stderr.puts "Run `bundle install` to install missing gems"
- exit e.status_code
- end
-
- require 'test/unit'
- require 'test/unit/rr'
-
- $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
- $LOAD_PATH.unshift(File.dirname(__FILE__))
- require 'fluent/load'
- require 'fluent/test'
- require 'fluent/plugin/out_kinesis'
data/test/plugin/test_out_kinesis.rb +0 -641
@@ -1,641 +0,0 @@
- # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License"). You
- # may not use this file except in compliance with the License. A copy of
- # the License is located at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # or in the "license" file accompanying this file. This file is
- # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
- # ANY KIND, either express or implied. See the License for the specific
- # language governing permissions and limitations under the License.
-
- require 'helper'
-
- class KinesisOutputTest < Test::Unit::TestCase
- def setup
- Fluent::Test.setup
- end
-
- CONFIG = %[
- aws_key_id test_key_id
- aws_sec_key test_sec_key
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- ]
-
- CONFIG_YAJL= CONFIG + %[
- use_yajl true
- ]
-
- CONFIG_WITH_COMPRESSION = CONFIG + %[
- zlib_compression true
- ]
-
- CONFIG_YAJL_WITH_COMPRESSION = CONFIG_YAJL + %[
- zlib_compression true
- ]
-
- def create_driver(conf = CONFIG, tag='test')
- Fluent::Test::BufferedOutputTestDriver
- .new(FluentPluginKinesis::OutputFilter, tag).configure(conf)
- end
-
- def create_mock_client
- client = mock(Object.new)
- mock(Aws::Kinesis::Client).new({}) { client }
- return client
- end
-
- def test_configure
- d = create_driver
- assert_equal 'test_key_id', d.instance.aws_key_id
- assert_equal 'test_sec_key', d.instance.aws_sec_key
- assert_equal 'test_stream', d.instance.stream_name
- assert_equal 'us-east-1', d.instance.region
- assert_equal 'test_partition_key', d.instance.partition_key
- end
-
- def test_configure_with_credentials
- d = create_driver(<<-EOS)
- profile default
- credentials_path /home/scott/.aws/credentials
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- EOS
-
- assert_equal 'default', d.instance.profile
- assert_equal '/home/scott/.aws/credentials', d.instance.credentials_path
- assert_equal 'test_stream', d.instance.stream_name
- assert_equal 'us-east-1', d.instance.region
- assert_equal 'test_partition_key', d.instance.partition_key
- end
-
- def test_load_client
- client = stub(Object.new)
- client.describe_stream
- client.put_records { {} }
-
- stub(Aws::Kinesis::Client).new do |options|
- assert_equal("test_key_id", options[:access_key_id])
- assert_equal("test_sec_key", options[:secret_access_key])
- assert_equal("us-east-1", options[:region])
- client
- end
-
- d = create_driver
- d.run
- end
-
- def test_load_client_with_credentials
- client = stub(Object.new)
- client.describe_stream
- client.put_records { {} }
-
- stub(Aws::Kinesis::Client).new do |options|
- assert_equal(nil, options[:access_key_id])
- assert_equal(nil, options[:secret_access_key])
- assert_equal("us-east-1", options[:region])
-
- credentials = options[:credentials]
- assert_equal("default", credentials.profile_name)
- assert_equal("/home/scott/.aws/credentials", credentials.path)
-
- client
- end
-
- d = create_driver(<<-EOS)
- profile default
- credentials_path /home/scott/.aws/credentials
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- EOS
-
- d.run
- end
-
- def test_load_client_with_role_arn
- client = stub(Object.new)
- client.describe_stream
- client.put_records { {} }
-
- stub(Aws::AssumeRoleCredentials).new do |options|
- assert_equal("arn:aws:iam::001234567890:role/my-role", options[:role_arn])
- assert_equal("aws-fluent-plugin-kinesis", options[:role_session_name])
- assert_equal("my_external_id", options[:external_id])
- assert_equal(3600, options[:duration_seconds])
- "sts_credentials"
- end
-
- stub(Aws::Kinesis::Client).new do |options|
- assert_equal("sts_credentials", options[:credentials])
- client
- end
-
- d = create_driver(<<-EOS)
- role_arn arn:aws:iam::001234567890:role/my-role
- external_id my_external_id
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- EOS
- d.run
- end
-
- def test_configure_with_more_options
-
- conf = %[
- stream_name test_stream
- region us-east-1
- ensure_stream_connection false
- http_proxy http://proxy:3333/
- partition_key test_partition_key
- partition_key_expr record
- explicit_hash_key test_hash_key
- explicit_hash_key_expr record
- order_events true
- use_yajl true
- ]
- d = create_driver(conf)
- assert_equal 'test_stream', d.instance.stream_name
- assert_equal 'us-east-1', d.instance.region
- assert_equal false, d.instance.ensure_stream_connection
- assert_equal 'http://proxy:3333/', d.instance.http_proxy
- assert_equal 'test_partition_key', d.instance.partition_key
- assert_equal 'Proc',
- d.instance.instance_variable_get(:@partition_key_proc).class.to_s
- assert_equal 'test_hash_key', d.instance.explicit_hash_key
- assert_equal 'Proc',
- d.instance.instance_variable_get(:@explicit_hash_key_proc).class.to_s
- assert_equal 'a',
- d.instance.instance_variable_get(:@partition_key_proc).call('a')
- assert_equal 'a',
- d.instance.instance_variable_get(:@explicit_hash_key_proc).call('a')
- assert_equal true, d.instance.order_events
- assert_equal nil, d.instance.instance_variable_get(:@sequence_number_for_ordering)
- assert_equal true, d.instance.use_yajl
- end
-
- def test_mode_configuration
-
- conf = %[
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- ]
- d = create_driver(conf)
- assert_equal(false, d.instance.order_events)
- assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
-
- conf = %[
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- order_events true
- ]
- d = create_driver(conf)
- assert_equal(true, d.instance.order_events)
- assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
-
- conf = %[
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- num_threads 1
- ]
- d = create_driver(conf)
- assert_equal(false, d.instance.order_events)
- assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
-
- conf = %[
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- num_threads 2
- ]
- d = create_driver(conf)
- assert_equal(false, d.instance.order_events)
- assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
-
- conf = %[
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- detach_process 1
- ]
- d = create_driver(conf)
- assert_equal(false, d.instance.order_events)
- assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
-
- conf = %[
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- order_events true
- detach_process 1
- num_threads 2
- ]
- d = create_driver(conf)
- assert_equal(false, d.instance.order_events)
- assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
-
- end
-
-
- data("json"=>CONFIG, "yajl"=>CONFIG_YAJL)
- def test_format_without_compression(config)
-
- d = create_driver(config)
-
- data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
- data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
-
- time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.emit(data1, time)
- d.emit(data2, time)
-
- d.expect_format({
- 'data' => data1.to_json,
- 'partition_key' => 'key1' }.to_msgpack
- )
- d.expect_format({
- 'data' => data2.to_json,
- 'partition_key' => 'key2' }.to_msgpack
- )
-
- client = create_mock_client
- client.describe_stream(stream_name: 'test_stream')
- client.put_records(
- stream_name: 'test_stream',
- records: [
- {
- data: data1.to_json,
- partition_key: 'key1'
- },
- {
- data: data2.to_json,
- partition_key: 'key2'
- }
- ]
- ) { {} }
-
- d.run
- end
-
- data("json"=>CONFIG_WITH_COMPRESSION, "yajl"=>CONFIG_YAJL_WITH_COMPRESSION)
- def test_format_with_compression(config)
-
- d = create_driver(config)
-
- data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
- data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
-
- time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.emit(data1, time)
- d.emit(data2, time)
-
- d.expect_format({
- 'data' => data1.to_json,
- 'partition_key' => 'key1' }.to_msgpack
- )
- d.expect_format({
- 'data' => data2.to_json,
- 'partition_key' => 'key2' }.to_msgpack
- )
-
- client = create_mock_client
- client.describe_stream(stream_name: 'test_stream')
- client.put_records(
- stream_name: 'test_stream',
- records: [
- {
- data: Zlib::Deflate.deflate(data1.to_json),
- partition_key: 'key1'
- },
- {
- data: Zlib::Deflate.deflate(data2.to_json),
- partition_key: 'key2'
- }
- ]
- ) { {} }
-
- d.run
- end
-
- def test_order_events
-
- d = create_driver(CONFIG + "\norder_events true")
-
- data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
- data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
-
- time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.emit(data1, time)
- d.emit(data2, time)
-
- d.expect_format({
- 'data' => data1.to_json,
- 'partition_key' => 'key1' }.to_msgpack
- )
- d.expect_format({
- 'data' => data2.to_json,
- 'partition_key' => 'key2' }.to_msgpack
- )
-
- client = create_mock_client
- client.describe_stream(stream_name: 'test_stream')
- client.put_record(
- data: data1.to_json,
- partition_key: 'key1',
- stream_name: 'test_stream'
- ) { {sequence_number: 1} }
- client.put_record(
- data: data2.to_json,
- partition_key: 'key2',
- sequence_number_for_ordering: 1,
- stream_name: 'test_stream'
- ) { {} }
-
- d.run
- end
-
- def test_format_at_lowlevel
- d = create_driver
- data = {"test_partition_key"=>"key1","a"=>1}
- assert_equal(
- MessagePack.pack({
- "data" => data.to_json,
- "partition_key" => "key1"
- }),
- d.instance.format('test','test',data)
- )
- end
-
- def test_format_at_lowlevel_with_more_options
-
- conf = %[
- stream_name test_stream
- region us-east-1
- partition_key test_partition_key
- partition_key_expr record
- explicit_hash_key test_hash_key
- explicit_hash_key_expr record
- order_events true
- ]
-
- d = create_driver(conf)
- data = {"test_partition_key"=>"key1","test_hash_key"=>"hash1","a"=>1}
- assert_equal(
- MessagePack.pack({
- "data" => data.to_json,
- "partition_key" => "key1",
- "explicit_hash_key" => "hash1"
- }),
- d.instance.format('test','test',data)
- )
- end
-
- def test_multibyte_with_yajl_without_compression
-
- d = create_driver(CONFIG_YAJL)
-
- data1 = {"test_partition_key"=>"key1","a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
- json = Yajl.dump(data1)
- data1["a"].force_encoding("ASCII-8BIT")
-
- time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.emit(data1, time)
-
- d.expect_format({
- 'data' => json,
- 'partition_key' => 'key1' }.to_msgpack
- )
-
- client = create_mock_client
- client.describe_stream(stream_name: 'test_stream')
- client.put_records(
- stream_name: 'test_stream',
- records: [
- {
- data: json,
- partition_key: 'key1'
- }
- ]
- ) { {} }
-
- d.run
- end
-
- def test_multibyte_with_yajl_with_compression
-
- d = create_driver(CONFIG_YAJL_WITH_COMPRESSION)
-
- data1 = {"test_partition_key"=>"key1","a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
- json = Yajl.dump(data1)
- data1["a"].force_encoding("ASCII-8BIT")
-
- time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.emit(data1, time)
-
- d.expect_format({
- 'data' => json,
- 'partition_key' => 'key1' }.to_msgpack
- )
-
- client = create_mock_client
- client.describe_stream(stream_name: 'test_stream')
- client.put_records(
- stream_name: 'test_stream',
- records: [
- {
- data: Zlib::Deflate.deflate(json),
- partition_key: 'key1'
- }
- ]
- ) { {} }
-
- d.run
- end
-
- def test_get_key
- d = create_driver
- assert_equal(
- "1",
- d.instance.send(:get_key, "partition_key", {"test_partition_key" => 1})
- )
-
- assert_equal(
- "abc",
- d.instance.send(:get_key, "partition_key", {"test_partition_key" => "abc"})
- )
-
- d = create_driver(%[
- random_partition_key true
- stream_name test_stream
- region us-east-1'
- ])
-
- assert_match(
- /\A[\da-f-]{36}\z/,
- d.instance.send(:get_key, 'foo', 'bar')
- )
-
- d = create_driver(%[
- random_partition_key true
- partition_key test_key
- stream_name test_stream
- region us-east-1'
- ])
-
- assert_match(
- /\A[\da-f-]{36}\z/,
- d.instance.send(
- :get_key,
- 'partition_key',
- {"test_key" => 'key1'}
- )
- )
-
- d = create_driver(%[
- random_partition_key true
- partition_key test_key
- explicit_hash_key explicit_key
- stream_name test_stream
- region us-east-1'
- ])
-
- assert_match(
- /\A[\da-f-]{36}\z/,
- d.instance.send(
- :get_key,
- 'partition_key',
- {"test_key" => 'key1', "explicit_key" => 'key2'}
- )
- )
- end
-
- def test_record_exceeds_max_size
- d = create_driver
-
- # PUT_RECORD_MAX_DATA_SIZE = 1024*1024 is way too big, try something smaller (10), just to verify the logic not the actual value
- original_put_record_max_data_size = d.instance.class.send(:remove_const, :PUT_RECORD_MAX_DATA_SIZE) if d.instance.class.const_defined?(:PUT_RECORD_MAX_DATA_SIZE)
- d.instance.class.const_set(:PUT_RECORD_MAX_DATA_SIZE, 10)
-
- string = ''
- (1..5).each{ string = string + '1' }
- assert_equal(
- false,
- d.instance.send(:record_exceeds_max_size?,string)
- )
-
- string = ''
- (1..10).each{ string = string + '1' }
- assert_equal(
- false,
- d.instance.send(:record_exceeds_max_size?,string)
- )
-
- string = ''
- (1..11).each{ string = string + '1' }
- assert_equal(
- true,
- d.instance.send(:record_exceeds_max_size?,string)
- )
-
- # reset the constant
- d.instance.class.const_set(:PUT_RECORD_MAX_DATA_SIZE, original_put_record_max_data_size)
- end
-
- def test_build_records_array_to_put
- d = create_driver
-
- # PUT_RECORDS_MAX_DATA_SIZE = 1024*1024*5 is way too big, try something smaller (100), just to verify the logic not the actual value
- original_put_records_max_data_size = d.instance.class.send(:remove_const, :PUT_RECORDS_MAX_DATA_SIZE) if d.instance.class.const_defined?(:PUT_RECORDS_MAX_DATA_SIZE)
- d.instance.class.const_set(:PUT_RECORDS_MAX_DATA_SIZE, 100)
-
- # PUT_RECORDS_MAX_COUNT = 500 is way too big, try something smaller (10), just to verify the logic not the actual value
- original_put_records_max_count = d.instance.class.send(:remove_const, :PUT_RECORDS_MAX_COUNT) if d.instance.class.const_defined?(:PUT_RECORDS_MAX_COUNT)
- d.instance.class.const_set(:PUT_RECORDS_MAX_COUNT, 10)
-
- data_list = []
- (0..10).each do |n|
- data_list.push({data: '1', partition_key: '0'})
- end
- result = d.instance.send(:build_records_array_to_put,data_list)
- assert_equal(2,result.length)
- assert_equal(10,result[0].length)
- assert_equal(1,result[1].length)
-
- data_list = []
- (0..24).each do
- data_list.push({data: '1', partition_key: '0'})
- end
- result = d.instance.send(:build_records_array_to_put,data_list)
- assert_equal(3,result.length)
- assert_equal(10,result[0].length)
- assert_equal(10,result[1].length)
- assert_equal(5,result[2].length)
-
- data_list = []
- (0..20).each do
- data_list.push({data: '0123456789', partition_key: '1'})
- end
- # Should return 3 lists: 9*11 + 9*11 + 3*11
- result = d.instance.send(:build_records_array_to_put,data_list)
- assert_equal(3,result.length)
- assert_equal(9,result[0].length)
- assert_operator(
- 100, :>,
- result[0].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
- )
- assert_equal(9,result[1].length)
- assert_operator(
- 100, :>,
- result[1].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
- )
- assert_equal(3,result[2].length)
- assert_operator(
- 100, :>,
- result[2].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
- )
-
- # reset the constants
- d.instance.class.const_set(:PUT_RECORDS_MAX_DATA_SIZE, original_put_records_max_data_size)
- d.instance.class.const_set(:PUT_RECORDS_MAX_COUNT, original_put_records_max_count)
- end
-
- def test_build_empty_array_to_put
- d = create_driver
- data_list = []
- result = d.instance.send(:build_records_array_to_put,data_list)
- assert_equal(0, result.length, 'Should return empty array if there is no record')
- end
-
- def test_build_data_to_put
- d = create_driver
- assert_equal(
- {key: 1},
- d.instance.send(:build_data_to_put,{"key"=>1})
- )
- end
-
- def test_calculate_sleep_duration
- d = create_driver
- assert_operator(
- 1, :>,
- d.instance.send(:calculate_sleep_duration,0)
- )
- assert_operator(
- 2, :>,
- d.instance.send(:calculate_sleep_duration,1)
- )
- assert_operator(
- 4, :>,
- d.instance.send(:calculate_sleep_duration,2)
- )
- end
- end
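For comparison, the CONFIG constant in the removed test above maps onto a 0.4.x-style match section for the original output plugin. The parameter names below are copied from the deleted test; the tag and the kinesis output type name are inferred from data/lib/fluent/plugin/out_kinesis.rb and should be treated as illustrative.

  # 0.4.x-style configuration reconstructed from the deleted test's CONFIG constant
  <match your.tag.**>
    type kinesis
    stream_name test_stream
    region us-east-1
    aws_key_id test_key_id
    aws_sec_key test_sec_key
    partition_key test_partition_key
  </match>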