fluent-plugin-firehose 0.0.5 → 0.0.6

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 2524a1b0ccffd35b9757f527a8f114df2a29e4b6
4
- data.tar.gz: f2bfc58a14c833058a9e079adff3013579589f3e
3
+ metadata.gz: 47a0b638ce65b4ce3dd7ba6f73054a04de94e773
4
+ data.tar.gz: 4bd9874a047493966c34fe234096bb097dba0869
5
5
  SHA512:
6
- metadata.gz: b023e4aad6b471f915475eba4f128fda7540cccd1d855734605aea18df931a508db8871150247f8b6cb93c003f45af2559a2eef9e52954af6946d8835049635a
7
- data.tar.gz: 7580b1cf96ddcecffcf7bf0342ea69a96de73e87d96c9af0ab933e9611080083f08f2057e60986c6f8c4e600ab9d64c5841db4a7e2265aa9ec30c763700eb946
6
+ metadata.gz: d10f373fecb71c1742f57a4710bd3be8110f4f80f5968e8c1de12db66439efc120fb1c412db20cda13ff72d2be5534fdf94298c1763e7b562bf636fed1883600
7
+ data.tar.gz: a5322576e019842a8de5db379f5a1429d8fdcea89b37e35703b008cc47e0bf724f1816c6c73f169d86b99f701079a5088be6d08109c645dccc6916f3dd4e25e1
data/.gitignore CHANGED
@@ -28,9 +28,9 @@ build/
28
28
 
29
29
  # for a library or gem, you might want to ignore these files since the code is
30
30
  # intended to run in multiple environments; otherwise, check them in:
31
- # Gemfile.lock
32
- # .ruby-version
33
- # .ruby-gemset
31
+ Gemfile.lock
32
+ .ruby-version
33
+ .ruby-gemset
34
34
 
35
35
  # unless supporting rvm < 1.11.0 or doing something fancy, ignore this:
36
36
  .rvmrc
data/CHANGELOG.md CHANGED
@@ -1,3 +1,4 @@
1
1
  # CHANGELOG
2
2
 
3
+ ## 0.0.6 Fixed typos and rake tests setup
3
4
  ## 0.0.1 Forked from https://github.com/awslabs/aws-fluent-plugin-kinesis
@@ -55,6 +55,8 @@ module FluentPluginFirehose
55
55
  config_param :use_yajl, :bool, default: false
56
56
  config_param :zlib_compression, :bool, default: false
57
57
 
58
+ config_param :append_newline, :bool, default: false
59
+
58
60
  config_param :debug, :bool, default: false
59
61
 
60
62
  config_param :http_proxy, :string, default: nil
@@ -78,7 +80,7 @@ module FluentPluginFirehose
78
80
  @order_events = false
79
81
  end
80
82
 
81
- @dump_class = @use_yajl ? Yajl : JSON
83
+ @dump_class = @use_yajl ? Yajl : MultiJson
82
84
  end
83
85
 
84
86
  def start
@@ -93,7 +95,7 @@ module FluentPluginFirehose
93
95
 
94
96
  def format(tag, time, record)
95
97
  data = {
96
- data: @dump_class.dump(record)
98
+ data: @dump_class.dump(record) + if @append_newline then "\n" else '' end
97
99
  }
98
100
 
99
101
  data.to_msgpack
@@ -251,7 +253,7 @@ module FluentPluginFirehose
251
253
  end
252
254
 
253
255
  def calculate_sleep_duration(current_retry)
254
- Array.new(@retries_on_putrecords){|n| ((2 ** n) * scaling_factor)}[current_retry]
256
+ Array.new(@retries_on_putrecordbatch){|n| ((2 ** n) * scaling_factor)}[current_retry]
255
257
  end
256
258
 
257
259
  def scaling_factor
@@ -13,5 +13,5 @@
13
13
  # language governing permissions and limitations under the License.
14
14
 
15
15
  module FluentPluginFirehose
16
- VERSION = '0.0.5'
16
+ VERSION = '0.0.6'
17
17
  end
@@ -22,9 +22,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
22
22
  CONFIG = %[
23
23
  aws_key_id test_key_id
24
24
  aws_sec_key test_sec_key
25
- stream_name test_stream
25
+ delivery_stream_name test_stream
26
26
  region us-east-1
27
- partition_key test_partition_key
28
27
  ]
29
28
 
30
29
  CONFIG_YAJL= CONFIG + %[
@@ -54,31 +53,28 @@ class FirehoseOutputTest < Test::Unit::TestCase
54
53
  d = create_driver
55
54
  assert_equal 'test_key_id', d.instance.aws_key_id
56
55
  assert_equal 'test_sec_key', d.instance.aws_sec_key
57
- assert_equal 'test_stream', d.instance.stream_name
56
+ assert_equal 'test_stream', d.instance.delivery_stream_name
58
57
  assert_equal 'us-east-1', d.instance.region
59
- assert_equal 'test_partition_key', d.instance.partition_key
60
58
  end
61
59
 
62
60
  def test_configure_with_credentials
63
61
  d = create_driver(<<-EOS)
64
62
  profile default
65
63
  credentials_path /home/scott/.aws/credentials
66
- stream_name test_stream
64
+ delivery_stream_name test_stream
67
65
  region us-east-1
68
- partition_key test_partition_key
69
66
  EOS
70
67
 
71
68
  assert_equal 'default', d.instance.profile
72
69
  assert_equal '/home/scott/.aws/credentials', d.instance.credentials_path
73
- assert_equal 'test_stream', d.instance.stream_name
70
+ assert_equal 'test_stream', d.instance.delivery_stream_name
74
71
  assert_equal 'us-east-1', d.instance.region
75
- assert_equal 'test_partition_key', d.instance.partition_key
76
72
  end
77
73
 
78
74
  def test_load_client
79
75
  client = stub(Object.new)
80
- client.describe_stream
81
- client.put_records { {} }
76
+ client.describe_delivery_stream
77
+ client.put_record_batch { {} }
82
78
 
83
79
  stub(Aws::Firehose::Client).new do |options|
84
80
  assert_equal("test_key_id", options[:access_key_id])
@@ -93,8 +89,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
93
89
 
94
90
  def test_load_client_with_credentials
95
91
  client = stub(Object.new)
96
- client.describe_stream
97
- client.put_records { {} }
92
+ client.describe_delivery_stream
93
+ client.put_record_batch { {} }
98
94
 
99
95
  stub(Aws::Firehose::Client).new do |options|
100
96
  assert_equal(nil, options[:access_key_id])
@@ -111,9 +107,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
111
107
  d = create_driver(<<-EOS)
112
108
  profile default
113
109
  credentials_path /home/scott/.aws/credentials
114
- stream_name test_stream
110
+ delivery_stream_name test_stream
115
111
  region us-east-1
116
- partition_key test_partition_key
117
112
  EOS
118
113
 
119
114
  d.run
@@ -121,12 +116,12 @@ class FirehoseOutputTest < Test::Unit::TestCase
121
116
 
122
117
  def test_load_client_with_role_arn
123
118
  client = stub(Object.new)
124
- client.describe_stream
125
- client.put_records { {} }
119
+ client.describe_delivery_stream
120
+ client.put_record_batch { {} }
126
121
 
127
122
  stub(Aws::AssumeRoleCredentials).new do |options|
128
123
  assert_equal("arn:aws:iam::001234567890:role/my-role", options[:role_arn])
129
- assert_equal("aws-fluent-plugin-kinesis", options[:role_session_name])
124
+ assert_equal("aws-fluent-plugin-firehose", options[:role_session_name])
130
125
  assert_equal("my_external_id", options[:external_id])
131
126
  assert_equal(3600, options[:duration_seconds])
132
127
  "sts_credentials"
@@ -140,9 +135,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
140
135
  d = create_driver(<<-EOS)
141
136
  role_arn arn:aws:iam::001234567890:role/my-role
142
137
  external_id my_external_id
143
- stream_name test_stream
138
+ delivery_stream_name test_stream
144
139
  region us-east-1
145
- partition_key test_partition_key
146
140
  EOS
147
141
  d.run
148
142
  end
@@ -150,32 +144,18 @@ class FirehoseOutputTest < Test::Unit::TestCase
150
144
  def test_configure_with_more_options
151
145
 
152
146
  conf = %[
153
- stream_name test_stream
147
+ delivery_stream_name test_stream
154
148
  region us-east-1
155
149
  ensure_stream_connection false
156
150
  http_proxy http://proxy:3333/
157
- partition_key test_partition_key
158
- partition_key_expr record
159
- explicit_hash_key test_hash_key
160
- explicit_hash_key_expr record
161
151
  order_events true
162
152
  use_yajl true
163
153
  ]
164
154
  d = create_driver(conf)
165
- assert_equal 'test_stream', d.instance.stream_name
155
+ assert_equal 'test_stream', d.instance.delivery_stream_name
166
156
  assert_equal 'us-east-1', d.instance.region
167
157
  assert_equal false, d.instance.ensure_stream_connection
168
158
  assert_equal 'http://proxy:3333/', d.instance.http_proxy
169
- assert_equal 'test_partition_key', d.instance.partition_key
170
- assert_equal 'Proc',
171
- d.instance.instance_variable_get(:@partition_key_proc).class.to_s
172
- assert_equal 'test_hash_key', d.instance.explicit_hash_key
173
- assert_equal 'Proc',
174
- d.instance.instance_variable_get(:@explicit_hash_key_proc).class.to_s
175
- assert_equal 'a',
176
- d.instance.instance_variable_get(:@partition_key_proc).call('a')
177
- assert_equal 'a',
178
- d.instance.instance_variable_get(:@explicit_hash_key_proc).call('a')
179
159
  assert_equal true, d.instance.order_events
180
160
  assert_equal nil, d.instance.instance_variable_get(:@sequence_number_for_ordering)
181
161
  assert_equal true, d.instance.use_yajl
@@ -184,18 +164,16 @@ class FirehoseOutputTest < Test::Unit::TestCase
184
164
  def test_mode_configuration
185
165
 
186
166
  conf = %[
187
- stream_name test_stream
167
+ delivery_stream_name test_stream
188
168
  region us-east-1
189
- partition_key test_partition_key
190
169
  ]
191
170
  d = create_driver(conf)
192
171
  assert_equal(false, d.instance.order_events)
193
172
  assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
194
173
 
195
174
  conf = %[
196
- stream_name test_stream
175
+ delivery_stream_name test_stream
197
176
  region us-east-1
198
- partition_key test_partition_key
199
177
  order_events true
200
178
  ]
201
179
  d = create_driver(conf)
@@ -203,9 +181,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
203
181
  assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
204
182
 
205
183
  conf = %[
206
- stream_name test_stream
184
+ delivery_stream_name test_stream
207
185
  region us-east-1
208
- partition_key test_partition_key
209
186
  num_threads 1
210
187
  ]
211
188
  d = create_driver(conf)
@@ -213,9 +190,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
213
190
  assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
214
191
 
215
192
  conf = %[
216
- stream_name test_stream
193
+ delivery_stream_name test_stream
217
194
  region us-east-1
218
- partition_key test_partition_key
219
195
  num_threads 2
220
196
  ]
221
197
  d = create_driver(conf)
@@ -223,9 +199,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
223
199
  assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
224
200
 
225
201
  conf = %[
226
- stream_name test_stream
202
+ delivery_stream_name test_stream
227
203
  region us-east-1
228
- partition_key test_partition_key
229
204
  detach_process 1
230
205
  ]
231
206
  d = create_driver(conf)
@@ -233,9 +208,8 @@ class FirehoseOutputTest < Test::Unit::TestCase
233
208
  assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
234
209
 
235
210
  conf = %[
236
- stream_name test_stream
211
+ delivery_stream_name test_stream
237
212
  region us-east-1
238
- partition_key test_partition_key
239
213
  order_events true
240
214
  detach_process 1
241
215
  num_threads 2
@@ -252,34 +226,26 @@ class FirehoseOutputTest < Test::Unit::TestCase
252
226
 
253
227
  d = create_driver(config)
254
228
 
255
- data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
256
- data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
229
+ data1 = {"a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
230
+ data2 = {"a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
257
231
 
258
232
  time = Time.parse("2011-01-02 13:14:15 UTC").to_i
259
233
  d.emit(data1, time)
260
234
  d.emit(data2, time)
261
235
 
262
- d.expect_format({
263
- 'data' => data1.to_json,
264
- 'partition_key' => 'key1' }.to_msgpack
265
- )
266
- d.expect_format({
267
- 'data' => data2.to_json,
268
- 'partition_key' => 'key2' }.to_msgpack
269
- )
236
+ d.expect_format({'data' => data1.to_json }.to_msgpack)
237
+ d.expect_format({'data' => data2.to_json }.to_msgpack)
270
238
 
271
239
  client = create_mock_client
272
- client.describe_stream(delivery_stream_name: 'test_stream')
273
- client.put_records(
240
+ client.describe_delivery_stream(delivery_stream_name: 'test_stream')
241
+ client.put_record_batch(
274
242
  delivery_stream_name: 'test_stream',
275
243
  records: [
276
244
  {
277
245
  data: data1.to_json,
278
- partition_key: 'key1'
279
246
  },
280
247
  {
281
248
  data: data2.to_json,
282
- partition_key: 'key2'
283
249
  }
284
250
  ]
285
251
  ) { {} }
@@ -292,34 +258,26 @@ class FirehoseOutputTest < Test::Unit::TestCase
292
258
 
293
259
  d = create_driver(config)
294
260
 
295
- data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
296
- data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
261
+ data1 = {"a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
262
+ data2 = {"a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
297
263
 
298
264
  time = Time.parse("2011-01-02 13:14:15 UTC").to_i
299
265
  d.emit(data1, time)
300
266
  d.emit(data2, time)
301
267
 
302
- d.expect_format({
303
- 'data' => data1.to_json,
304
- 'partition_key' => 'key1' }.to_msgpack
305
- )
306
- d.expect_format({
307
- 'data' => data2.to_json,
308
- 'partition_key' => 'key2' }.to_msgpack
309
- )
268
+ d.expect_format({'data' => data1.to_json}.to_msgpack)
269
+ d.expect_format({'data' => data2.to_json}.to_msgpack)
310
270
 
311
271
  client = create_mock_client
312
- client.describe_stream(delivery_stream_name: 'test_stream')
313
- client.put_records(
272
+ client.describe_delivery_stream(delivery_stream_name: 'test_stream')
273
+ client.put_record_batch(
314
274
  delivery_stream_name: 'test_stream',
315
275
  records: [
316
276
  {
317
277
  data: Zlib::Deflate.deflate(data1.to_json),
318
- partition_key: 'key1'
319
278
  },
320
279
  {
321
280
  data: Zlib::Deflate.deflate(data2.to_json),
322
- partition_key: 'key2'
323
281
  }
324
282
  ]
325
283
  ) { {} }
@@ -331,32 +289,24 @@ class FirehoseOutputTest < Test::Unit::TestCase
331
289
 
332
290
  d = create_driver(CONFIG + "\norder_events true")
333
291
 
334
- data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
335
- data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
292
+ data1 = {"a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
293
+ data2 = {"a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
336
294
 
337
295
  time = Time.parse("2011-01-02 13:14:15 UTC").to_i
338
296
  d.emit(data1, time)
339
297
  d.emit(data2, time)
340
298
 
341
- d.expect_format({
342
- 'data' => data1.to_json,
343
- 'partition_key' => 'key1' }.to_msgpack
344
- )
345
- d.expect_format({
346
- 'data' => data2.to_json,
347
- 'partition_key' => 'key2' }.to_msgpack
348
- )
299
+ d.expect_format({'data' => data1.to_json}.to_msgpack)
300
+ d.expect_format({'data' => data2.to_json}.to_msgpack)
349
301
 
350
302
  client = create_mock_client
351
- client.describe_stream(delivery_stream_name: 'test_stream')
303
+ client.describe_delivery_stream(delivery_stream_name: 'test_stream')
352
304
  client.put_record(
353
305
  data: data1.to_json,
354
- partition_key: 'key1',
355
306
  delivery_stream_name: 'test_stream'
356
307
  ) { {sequence_number: 1} }
357
308
  client.put_record(
358
309
  data: data2.to_json,
359
- partition_key: 'key2',
360
310
  sequence_number_for_ordering: 1,
361
311
  delivery_stream_name: 'test_stream'
362
312
  ) { {} }
@@ -366,11 +316,10 @@ class FirehoseOutputTest < Test::Unit::TestCase
366
316
 
367
317
  def test_format_at_lowlevel
368
318
  d = create_driver
369
- data = {"test_partition_key"=>"key1","a"=>1}
319
+ data = {"a"=>1}
370
320
  assert_equal(
371
321
  MessagePack.pack({
372
- "data" => data.to_json,
373
- "partition_key" => "key1"
322
+ "data" => data.to_json
374
323
  }),
375
324
  d.instance.format('test','test',data)
376
325
  )
@@ -379,22 +328,16 @@ class FirehoseOutputTest < Test::Unit::TestCase
379
328
  def test_format_at_lowlevel_with_more_options
380
329
 
381
330
  conf = %[
382
- stream_name test_stream
331
+ delivery_stream_name test_stream
383
332
  region us-east-1
384
- partition_key test_partition_key
385
- partition_key_expr record
386
- explicit_hash_key test_hash_key
387
- explicit_hash_key_expr record
388
333
  order_events true
389
334
  ]
390
335
 
391
336
  d = create_driver(conf)
392
- data = {"test_partition_key"=>"key1","test_hash_key"=>"hash1","a"=>1}
337
+ data = {"a"=>1}
393
338
  assert_equal(
394
339
  MessagePack.pack({
395
- "data" => data.to_json,
396
- "partition_key" => "key1",
397
- "explicit_hash_key" => "hash1"
340
+ "data" => data.to_json
398
341
  }),
399
342
  d.instance.format('test','test',data)
400
343
  )
@@ -404,26 +347,22 @@ class FirehoseOutputTest < Test::Unit::TestCase
404
347
 
405
348
  d = create_driver(CONFIG_YAJL)
406
349
 
407
- data1 = {"test_partition_key"=>"key1","a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
350
+ data1 = {"a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
408
351
  json = Yajl.dump(data1)
409
352
  data1["a"].force_encoding("ASCII-8BIT")
410
353
 
411
354
  time = Time.parse("2011-01-02 13:14:15 UTC").to_i
412
355
  d.emit(data1, time)
413
356
 
414
- d.expect_format({
415
- 'data' => json,
416
- 'partition_key' => 'key1' }.to_msgpack
417
- )
357
+ d.expect_format({'data' => json}.to_msgpack)
418
358
 
419
359
  client = create_mock_client
420
- client.describe_stream(delivery_stream_name: 'test_stream')
421
- client.put_records(
360
+ client.describe_delivery_stream(delivery_stream_name: 'test_stream')
361
+ client.put_record_batch(
422
362
  delivery_stream_name: 'test_stream',
423
363
  records: [
424
364
  {
425
365
  data: json,
426
- partition_key: 'key1'
427
366
  }
428
367
  ]
429
368
  ) { {} }
@@ -435,26 +374,22 @@ class FirehoseOutputTest < Test::Unit::TestCase
435
374
 
436
375
  d = create_driver(CONFIG_YAJL_WITH_COMPRESSION)
437
376
 
438
- data1 = {"test_partition_key"=>"key1","a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
377
+ data1 = {"a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
439
378
  json = Yajl.dump(data1)
440
379
  data1["a"].force_encoding("ASCII-8BIT")
441
380
 
442
381
  time = Time.parse("2011-01-02 13:14:15 UTC").to_i
443
382
  d.emit(data1, time)
444
383
 
445
- d.expect_format({
446
- 'data' => json,
447
- 'partition_key' => 'key1' }.to_msgpack
448
- )
384
+ d.expect_format({'data' => json}.to_msgpack)
449
385
 
450
386
  client = create_mock_client
451
- client.describe_stream(delivery_stream_name: 'test_stream')
452
- client.put_records(
387
+ client.describe_delivery_stream(delivery_stream_name: 'test_stream')
388
+ client.put_record_batch(
453
389
  delivery_stream_name: 'test_stream',
454
390
  records: [
455
391
  {
456
392
  data: Zlib::Deflate.deflate(json),
457
- partition_key: 'key1'
458
393
  }
459
394
  ]
460
395
  ) { {} }
@@ -462,63 +397,6 @@ class FirehoseOutputTest < Test::Unit::TestCase
462
397
  d.run
463
398
  end
464
399
 
465
- def test_get_key
466
- d = create_driver
467
- assert_equal(
468
- "1",
469
- d.instance.send(:get_key, "partition_key", {"test_partition_key" => 1})
470
- )
471
-
472
- assert_equal(
473
- "abc",
474
- d.instance.send(:get_key, "partition_key", {"test_partition_key" => "abc"})
475
- )
476
-
477
- d = create_driver(%[
478
- random_partition_key true
479
- stream_name test_stream
480
- region us-east-1'
481
- ])
482
-
483
- assert_match(
484
- /\A[\da-f-]{36}\z/,
485
- d.instance.send(:get_key, 'foo', 'bar')
486
- )
487
-
488
- d = create_driver(%[
489
- random_partition_key true
490
- partition_key test_key
491
- stream_name test_stream
492
- region us-east-1'
493
- ])
494
-
495
- assert_match(
496
- /\A[\da-f-]{36}\z/,
497
- d.instance.send(
498
- :get_key,
499
- 'partition_key',
500
- {"test_key" => 'key1'}
501
- )
502
- )
503
-
504
- d = create_driver(%[
505
- random_partition_key true
506
- partition_key test_key
507
- explicit_hash_key explicit_key
508
- stream_name test_stream
509
- region us-east-1'
510
- ])
511
-
512
- assert_match(
513
- /\A[\da-f-]{36}\z/,
514
- d.instance.send(
515
- :get_key,
516
- 'partition_key',
517
- {"test_key" => 'key1', "explicit_key" => 'key2'}
518
- )
519
- )
520
- end
521
-
522
400
  def test_record_exceeds_max_size
523
401
  d = create_driver
524
402
 
@@ -554,17 +432,17 @@ class FirehoseOutputTest < Test::Unit::TestCase
554
432
  def test_build_records_array_to_put
555
433
  d = create_driver
556
434
 
557
- # PUT_RECORDS_MAX_DATA_SIZE = 1024*1024*5 is way too big, try something smaller (100), just to verify the logic not the actual value
558
- original_put_records_max_data_size = d.instance.class.send(:remove_const, :PUT_RECORD_BATCH_MAX_DATA_SIZE) if d.instance.class.const_defined?(:PUT_RECORD_BATCH_MAX_DATA_SIZE)
435
+ # PUT_RECORD_BATCH_MAX_DATA_SIZE = 4*1024*1024 is way too big, try something smaller (100), just to verify the logic not the actual value
436
+ original_put_record_batch_max_data_size = d.instance.class.send(:remove_const, :PUT_RECORD_BATCH_MAX_DATA_SIZE) if d.instance.class.const_defined?(:PUT_RECORD_BATCH_MAX_DATA_SIZE)
559
437
  d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_DATA_SIZE, 100)
560
438
 
561
- # PUT_RECORDS_MAX_COUNT = 500 is way too big, try something smaller (10), just to verify the logic not the actual value
562
- original_put_records_max_count = d.instance.class.send(:remove_const, :PUT_RECORD_BATCH_MAX_COUNT) if d.instance.class.const_defined?(:PUT_RECORD_BATCH_MAX_COUNT)
439
+ # PUT_RECORD_BATCH_MAX_COUNT = 500 is way too big, try something smaller (10), just to verify the logic not the actual value
440
+ original_put_record_batch_max_count = d.instance.class.send(:remove_const, :PUT_RECORD_BATCH_MAX_COUNT) if d.instance.class.const_defined?(:PUT_RECORD_BATCH_MAX_COUNT)
563
441
  d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_COUNT, 10)
564
442
 
565
443
  data_list = []
566
444
  (0..10).each do |n|
567
- data_list.push({data: '1', partition_key: '0'})
445
+ data_list.push({data: '1'})
568
446
  end
569
447
  result = d.instance.send(:build_records_array_to_put,data_list)
570
448
  assert_equal(2,result.length)
@@ -573,7 +451,7 @@ class FirehoseOutputTest < Test::Unit::TestCase
573
451
 
574
452
  data_list = []
575
453
  (0..24).each do
576
- data_list.push({data: '1', partition_key: '0'})
454
+ data_list.push({data: '1'})
577
455
  end
578
456
  result = d.instance.send(:build_records_array_to_put,data_list)
579
457
  assert_equal(3,result.length)
@@ -583,7 +461,7 @@ class FirehoseOutputTest < Test::Unit::TestCase
583
461
 
584
462
  data_list = []
585
463
  (0..20).each do
586
- data_list.push({data: '0123456789', partition_key: '1'})
464
+ data_list.push({data: '0123456789'})
587
465
  end
588
466
  # Should return 3 lists: 9*11 + 9*11 + 3*11
589
467
  result = d.instance.send(:build_records_array_to_put,data_list)
@@ -591,22 +469,22 @@ class FirehoseOutputTest < Test::Unit::TestCase
591
469
  assert_equal(9,result[0].length)
592
470
  assert_operator(
593
471
  100, :>,
594
- result[0].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
472
+ result[0].reduce(0){|sum,i| sum + i[:data].length}
595
473
  )
596
474
  assert_equal(9,result[1].length)
597
475
  assert_operator(
598
476
  100, :>,
599
- result[1].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
477
+ result[1].reduce(0){|sum,i| sum + i[:data].length}
600
478
  )
601
479
  assert_equal(3,result[2].length)
602
480
  assert_operator(
603
481
  100, :>,
604
- result[2].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
482
+ result[2].reduce(0){|sum,i| sum + i[:data].length}
605
483
  )
606
484
 
607
485
  # reset the constants
608
- d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_DATA_SIZE, original_put_records_max_data_size)
609
- d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_COUNT, original_put_records_max_count)
486
+ d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_DATA_SIZE, original_put_record_batch_max_data_size)
487
+ d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_COUNT, original_put_record_batch_max_count)
610
488
  end
611
489
 
612
490
  def test_build_empty_array_to_put
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-firehose
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.5
4
+ version: 0.0.6
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ji Oh Yoo
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2015-12-08 00:00:00.000000000 Z
12
+ date: 2015-12-11 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: bundler
@@ -130,7 +130,6 @@ files:
130
130
  - .gitignore
131
131
  - CHANGELOG.md
132
132
  - Gemfile
133
- - Gemfile.lock
134
133
  - LICENSE.txt
135
134
  - README.md
136
135
  - Rakefile
data/Gemfile.lock DELETED
@@ -1,58 +0,0 @@
1
- PATH
2
- remote: .
3
- specs:
4
- fluent-plugin-firehose (0.1.0)
5
- aws-sdk-core (>= 2.0.12, < 3.0)
6
- fluentd (>= 0.10.53, < 0.13)
7
- msgpack (>= 0.5.8)
8
- multi_json (~> 1.0)
9
-
10
- GEM
11
- remote: https://rubygems.org/
12
- specs:
13
- aws-sdk-core (2.2.4)
14
- jmespath (~> 1.0)
15
- cool.io (1.4.2)
16
- fluentd (0.12.17)
17
- cool.io (>= 1.2.2, < 2.0.0)
18
- http_parser.rb (>= 0.5.1, < 0.7.0)
19
- json (>= 1.4.3)
20
- msgpack (>= 0.5.11, < 0.6.0)
21
- sigdump (~> 0.2.2)
22
- string-scrub (>= 0.0.3)
23
- tzinfo (>= 1.0.0)
24
- tzinfo-data (>= 1.0.0)
25
- yajl-ruby (~> 1.0)
26
- http_parser.rb (0.6.0)
27
- jmespath (1.1.3)
28
- json (1.8.3)
29
- msgpack (0.5.12)
30
- multi_json (1.11.2)
31
- power_assert (0.2.6)
32
- rake (10.4.2)
33
- rr (1.1.2)
34
- sigdump (0.2.3)
35
- string-scrub (0.0.5)
36
- test-unit (3.1.5)
37
- power_assert
38
- test-unit-rr (1.0.3)
39
- rr (>= 1.1.1)
40
- test-unit (>= 2.5.2)
41
- thread_safe (0.3.5)
42
- tzinfo (1.2.2)
43
- thread_safe (~> 0.1)
44
- tzinfo-data (1.2015.7)
45
- tzinfo (>= 1.0.0)
46
- yajl-ruby (1.2.1)
47
-
48
- PLATFORMS
49
- ruby
50
-
51
- DEPENDENCIES
52
- bundler (~> 1.3)
53
- fluent-plugin-firehose!
54
- rake (~> 10.0)
55
- test-unit-rr (~> 1.0)
56
-
57
- BUNDLED WITH
58
- 1.10.6