fluent-plugin-hekk_redshift 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
test/plugin/test_out_redshift.rb ADDED
@@ -0,0 +1,526 @@
+ require 'test_helper'
+
+ require 'fluent/test'
+ require 'fluent/plugin/out_redshift'
+ require 'flexmock/test_unit'
+ require 'zlib'
+
+
+ class RedshiftOutputTest < Test::Unit::TestCase
+   def setup
+     require 'aws-sdk'
+     require 'pg'
+     require 'csv'
+     Fluent::Test.setup
+   end
+
+   CONFIG_BASE = %[
+     aws_key_id test_key_id
+     aws_sec_key test_sec_key
+     s3_bucket test_bucket
+     path log
+     redshift_host test_host
+     redshift_dbname test_db
+     redshift_user test_user
+     redshift_password test_password
+     redshift_tablename test_table
+     buffer_type memory
+     utc
+     log_suffix id:5 host:localhost
+   ]
+   CONFIG_CSV = %[
+     #{CONFIG_BASE}
+     file_type csv
+   ]
+   CONFIG_TSV = %[
+     #{CONFIG_BASE}
+     file_type tsv
+   ]
+   CONFIG_JSON = %[
+     #{CONFIG_BASE}
+     file_type json
+   ]
+   CONFIG_JSON_WITH_SCHEMA = %[
+     #{CONFIG_BASE}
+     redshift_schemaname test_schema
+     file_type json
+   ]
+   CONFIG_MSGPACK = %[
+     #{CONFIG_BASE}
+     file_type msgpack
+   ]
+   CONFIG_PIPE_DELIMITER = %[
+     #{CONFIG_BASE}
+     delimiter |
+   ]
+   CONFIG_PIPE_DELIMITER_WITH_NAME = %[
+     #{CONFIG_BASE}
+     file_type pipe
+     delimiter |
+   ]
+   CONFIG = CONFIG_CSV
+
+   RECORD_CSV_A = {"log" => %[val_a,val_b,val_c,val_d]}
+   RECORD_CSV_B = {"log" => %[val_e,val_f,val_g,val_h]}
+   RECORD_TSV_A = {"log" => %[val_a\tval_b\tval_c\tval_d]}
+   RECORD_TSV_B = {"log" => %[val_e\tval_f\tval_g\tval_h]}
+   RECORD_JSON_A = {"log" => %[{"key_a" : "val_a", "key_b" : "val_b"}]}
+   RECORD_JSON_B = {"log" => %[{"key_c" : "val_c", "key_d" : "val_d"}]}
+   RECORD_MSGPACK_A = {"key_a" => "val_a", "key_b" => "val_b"}
+   RECORD_MSGPACK_B = {"key_c" => "val_c", "key_d" => "val_d"}
+   DEFAULT_TIME = Time.parse("2013-03-06 12:15:02 UTC").to_i
+
+   def create_driver(conf = CONFIG, tag = 'test.input')
+     Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag).configure(conf)
+   end
+
+   def create_driver_no_write(conf = CONFIG, tag = 'test.input')
+     Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag) do
+       def write(chunk)
+         chunk.read
+       end
+     end.configure(conf)
+   end
+
+   def test_configure
+     assert_raise(Fluent::ConfigError) {
+       d = create_driver('')
+     }
+     assert_raise(Fluent::ConfigError) {
+       d = create_driver(CONFIG_BASE)
+     }
+     d = create_driver(CONFIG_CSV)
+     assert_equal "test_key_id", d.instance.aws_key_id
+     assert_equal "test_sec_key", d.instance.aws_sec_key
+     assert_equal "test_bucket", d.instance.s3_bucket
+     assert_equal "log/", d.instance.path
+     assert_equal "test_host", d.instance.redshift_host
+     assert_equal 5439, d.instance.redshift_port
+     assert_equal "test_db", d.instance.redshift_dbname
+     assert_equal "test_user", d.instance.redshift_user
+     assert_equal "test_password", d.instance.redshift_password
+     assert_equal "test_table", d.instance.redshift_tablename
+     assert_equal nil, d.instance.redshift_schemaname
+     assert_equal "FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS", d.instance.redshift_copy_base_options
+     assert_equal nil, d.instance.redshift_copy_options
+     assert_equal "csv", d.instance.file_type
+     assert_equal ",", d.instance.delimiter
+     assert_equal true, d.instance.utc
+   end
+   def test_configure_with_schemaname
+     d = create_driver(CONFIG_JSON_WITH_SCHEMA)
+     assert_equal "test_schema", d.instance.redshift_schemaname
+   end
+   def test_configure_localtime
+     d = create_driver(CONFIG_CSV.gsub(/ *utc */, ''))
+     assert_equal false, d.instance.utc
+   end
+   def test_configure_no_path
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, ''))
+     assert_equal "", d.instance.path
+   end
+   def test_configure_root_path
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /'))
+     assert_equal "", d.instance.path
+   end
+   def test_configure_path_with_slash
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path log/'))
+     assert_equal "log/", d.instance.path
+   end
+   def test_configure_path_starts_with_slash
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log/'))
+     assert_equal "log/", d.instance.path
+   end
+   def test_configure_path_starts_with_slash_without_last_slash
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log'))
+     assert_equal "log/", d.instance.path
+   end
+   def test_configure_tsv
+     d1 = create_driver(CONFIG_TSV)
+     assert_equal "tsv", d1.instance.file_type
+     assert_equal "\t", d1.instance.delimiter
+   end
+   def test_configure_json
+     d2 = create_driver(CONFIG_JSON)
+     assert_equal "json", d2.instance.file_type
+     assert_equal "\t", d2.instance.delimiter
+   end
+   def test_configure_msgpack
+     d2 = create_driver(CONFIG_MSGPACK)
+     assert_equal "msgpack", d2.instance.file_type
+     assert_equal "\t", d2.instance.delimiter
+   end
+   def test_configure_original_file_type
+     d3 = create_driver(CONFIG_PIPE_DELIMITER)
+     assert_equal nil, d3.instance.file_type
+     assert_equal "|", d3.instance.delimiter
+
+     d4 = create_driver(CONFIG_PIPE_DELIMITER_WITH_NAME)
+     assert_equal "pipe", d4.instance.file_type
+     assert_equal "|", d4.instance.delimiter
+   end
+   def test_configure_no_log_suffix
+     d = create_driver(CONFIG_CSV.gsub(/ *log_suffix *.+$/, ''))
+     assert_equal "", d.instance.log_suffix
+   end
+
+   def emit_csv(d)
+     d.emit(RECORD_CSV_A, DEFAULT_TIME)
+     d.emit(RECORD_CSV_B, DEFAULT_TIME)
+   end
+   def emit_tsv(d)
+     d.emit(RECORD_TSV_A, DEFAULT_TIME)
+     d.emit(RECORD_TSV_B, DEFAULT_TIME)
+   end
+   def emit_json(d)
+     d.emit(RECORD_JSON_A, DEFAULT_TIME)
+     d.emit(RECORD_JSON_B, DEFAULT_TIME)
+   end
+   def emit_msgpack(d)
+     d.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
+     d.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+   end
+
+   def test_format_csv
+     d_csv = create_driver_no_write(CONFIG_CSV)
+     emit_csv(d_csv)
+     d_csv.expect_format RECORD_CSV_A['log'] + "\n"
+     d_csv.expect_format RECORD_CSV_B['log'] + "\n"
+     d_csv.run
+   end
+   def test_format_tsv
+     d_tsv = create_driver_no_write(CONFIG_TSV)
+     emit_tsv(d_tsv)
+     d_tsv.expect_format RECORD_TSV_A['log'] + "\n"
+     d_tsv.expect_format RECORD_TSV_B['log'] + "\n"
+     d_tsv.run
+   end
+   def test_format_json
+     d_json = create_driver_no_write(CONFIG_JSON)
+     emit_json(d_json)
+     d_json.expect_format RECORD_JSON_A.to_msgpack
+     d_json.expect_format RECORD_JSON_B.to_msgpack
+     d_json.run
+   end
+
+   def test_format_msgpack
+     d_msgpack = create_driver_no_write(CONFIG_MSGPACK)
+     emit_msgpack(d_msgpack)
+     d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_A }.to_msgpack)
+     d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_B }.to_msgpack)
+     d_msgpack.run
+   end
+
+   class PGConnectionMock
+     def initialize(options = {})
+       @return_keys = options[:return_keys] || ['key_a', 'key_b', 'key_c', 'key_d', 'key_e', 'key_f', 'key_g', 'key_h']
+       @target_schema = options[:schemaname] || nil
+       @target_table = options[:tablename] || 'test_table'
+     end
+
+     def expected_column_list_query
+       if @target_schema
+         /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_schema = '#{@target_schema}' and table_name = '#{@target_table}'/
+       else
+         /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_name = '#{@target_table}'/
+       end
+     end
+
+     def expected_copy_query
+       if @target_schema
+         /\Acopy #{@target_schema}.#{@target_table} from/
+       else
+         /\Acopy #{@target_table} from/
+       end
+     end
+
+     def exec(sql, &block)
+       if block_given?
+         if sql =~ expected_column_list_query
+           yield @return_keys.collect{|key| {'column_name' => key}}
+         else
+           yield []
+         end
+       else
+         unless sql =~ expected_copy_query
+           error = PG::Error.new("ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details.")
+           error.result = "ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details."
+           raise error
+         end
+       end
+     end
+
+     def close
+     end
+   end
+
+   def setup_pg_mock
+     # create mock of PG
+     def PG.connect(dbinfo)
+       return PGConnectionMock.new
+     end
+   end
+
+   def setup_s3_mock(expected_data)
+     current_time = Time.now
+
+     # create mock of s3 object
+     s3obj = flexmock(AWS::S3::S3Object)
+     s3obj.should_receive(:exists?).with_any_args.and_return { false }
+     s3obj.should_receive(:write).with(
+       # pathname
+       on { |pathname|
+         data = nil
+         pathname.open { |f|
+           gz = Zlib::GzipReader.new(f)
+           data = gz.read
+           gz.close
+         }
+         assert_equal expected_data, data
+       },
+       :acl => :bucket_owner_full_control
+     ).and_return { true }
+
+     # create mock of s3 object collection
+     s3obj_col = flexmock(AWS::S3::ObjectCollection)
+     s3obj_col.should_receive(:[]).with(
+       on { |key|
+         expected_key = current_time.utc.strftime("log/year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M_00.gz")
+         key == expected_key
+       }).
+       and_return {
+         s3obj
+       }
+
+     # create mock of s3 bucket
+     flexmock(AWS::S3::Bucket).new_instances do |bucket|
+       bucket.should_receive(:objects).with_any_args.
+         and_return {
+           s3obj_col
+         }
+     end
+   end
+
+   def setup_tempfile_mock_to_be_closed
+     flexmock(Tempfile).new_instances.should_receive(:close!).at_least.once
+   end
+
+   def setup_mocks(expected_data)
+     setup_pg_mock
+     setup_s3_mock(expected_data)
+   end
+   def test_write_with_csv
+     setup_mocks(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+     setup_tempfile_mock_to_be_closed
+     d_csv = create_driver
+     emit_csv(d_csv)
+     assert_equal true, d_csv.run
+   end
+
+   def test_write_with_json
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
+     setup_tempfile_mock_to_be_closed
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_hash_value
+     setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : {"foo" : "var"}}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_array_value
+     setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : ["foo", "var"]}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_including_tab_newline_quote
+     setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[{"key_a" : "val_a_with_\\t_tab_\\n_newline", "key_b" : "val_b_with_\\\\_quote"}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_no_data
+     setup_mocks("")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit("", DEFAULT_TIME)
+     d_json.emit("", DEFAULT_TIME)
+     assert_equal false, d_json.run
+   end
+
+   def test_write_with_json_invalid_one_line
+     setup_mocks(%[\t\tval_c\tval_d\t\t\t\t\n])
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[}}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_no_available_data
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit(RECORD_JSON_A, DEFAULT_TIME)
+     d_json.emit({"log" => %[{"key_o" : "val_o", "key_p" : "val_p"}]}, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_msgpack
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     emit_msgpack(d_msgpack)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_hash_value
+     setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({"key_a" => "val_a", "key_b" => {"foo" => "var"}}, DEFAULT_TIME)
+     d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_array_value
+     setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({"key_a" => "val_a", "key_b" => ["foo", "var"]}, DEFAULT_TIME)
+     d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_including_tab_newline_quote
+     setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({"key_a" => "val_a_with_\t_tab_\n_newline", "key_b" => "val_b_with_\\_quote"}, DEFAULT_TIME)
+     d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_no_data
+     setup_mocks("")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({}, DEFAULT_TIME)
+     d_msgpack.emit({}, DEFAULT_TIME)
+     assert_equal false, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_no_available_data
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
+     d_msgpack.emit({"key_o" => "val_o", "key_p" => "val_p"}, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_redshift_connection_error
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql)
+           raise PG::Error, "redshift connection error"
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_csv = create_driver
+     emit_csv(d_csv)
+     assert_raise(PG::Error) {
+       d_csv.run
+     }
+   end
+
+   def test_write_redshift_load_error
+     PG::Error.module_eval { attr_accessor :result }
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql)
+           error = PG::Error.new("ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details.")
+           error.result = "ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details."
+           raise error
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_csv = create_driver
+     emit_csv(d_csv)
+     assert_equal false, d_csv.run
+   end
+
+   def test_write_with_json_redshift_connection_error
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql, &block)
+           error = PG::Error.new("redshift connection error")
+           raise error
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_raise(PG::Error) {
+       d_json.run
+     }
+   end
+
+   def test_write_with_json_no_table_on_redshift
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql, &block)
+           yield [] if block_given?
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_equal false, d_json.run
+   end
+
+   def test_write_with_json_failed_to_get_columns
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql, &block)
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock("")
+
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_raise(RuntimeError, "failed to fetch the redshift table definition.") {
+       d_json.run
+     }
+   end
+
+   def test_write_with_json_fetch_column_with_schema
+     def PG.connect(dbinfo)
+       return PGConnectionMock.new(:schemaname => 'test_schema')
+     end
+     setup_s3_mock(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
+     d_json = create_driver(CONFIG_JSON_WITH_SCHEMA)
+     emit_json(d_json)
+     assert_equal true, d_json.run
+   end
+ end
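
A note on the stubbing pattern above: the connection-error tests do not mock PG through flexmock; they simply redefine PG.connect as a singleton method, so the plugin under test receives a canned connection object instead of opening a real socket. A minimal standalone sketch of the pattern (the exec body here is illustrative, not copied from the tests):

    require 'pg'

    # Redefine PG.connect for the test process: every caller now gets a
    # stub connection whose exec returns canned rows, or raises PG::Error
    # to drive a particular failure scenario.
    def PG.connect(_dbinfo)
      Class.new do
        def exec(_sql)
          []  # canned result; raise PG::Error here to simulate a failure
        end

        def close; end
      end.new
    end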
test/test_helper.rb ADDED
@@ -0,0 +1,8 @@
+ if ENV['COVERAGE']
+   require 'simplecov'
+   SimpleCov.start do
+     add_filter 'test/'
+     add_filter 'pkg/'
+     add_filter 'vendor/'
+   end
+ end
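
The helper above loads SimpleCov only when the COVERAGE environment variable is set, so an ordinary test run skips coverage instrumentation. A minimal sketch of wiring that switch into a Rakefile (the :coverage task name is an assumption for illustration; the gem's actual Rakefile is not part of this diff):

    require 'rake/testtask'

    Rake::TestTask.new(:test) do |t|
      t.libs << 'test'
      t.test_files = FileList['test/**/test_*.rb']
    end

    # Hypothetical convenience task: set COVERAGE before test_helper loads.
    task :coverage do
      ENV['COVERAGE'] = '1'  # read by test/test_helper.rb
      Rake::Task[:test].invoke
    end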
metadata ADDED
@@ -0,0 +1,155 @@
+ --- !ruby/object:Gem::Specification
+ name: fluent-plugin-hekk_redshift
+ version: !ruby/object:Gem::Version
+   version: 0.0.4
+   prerelease:
+ platform: ruby
+ authors:
+ - Masashi Miyazaki
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2013-09-12 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: fluentd
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.10.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.10.0
+ - !ruby/object:Gem::Dependency
+   name: aws-sdk
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.6.3
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.6.3
+ - !ruby/object:Gem::Dependency
+   name: pg
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.16.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.16.0
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: simplecov
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.5.4
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.5.4
+ - !ruby/object:Gem::Dependency
+   name: flexmock
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.3.1
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.3.1
+ description: Amazon Redshift output plugin for Fluentd
+ email:
+ - mmasashi@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - LICENSE
+ - README.md
+ - Rakefile
+ - VERSION
+ - fluent-plugin-hekk_redshift.gemspec
+ - lib/fluent/plugin/out_hekk_redshift.rb
+ - test/plugin/test_out_redshift.rb
+ - test/test_helper.rb
+ homepage: https://github.com/hekk/fluent-plugin-redshift
+ licenses:
+ - APLv2
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.23
+ signing_key:
+ specification_version: 3
+ summary: Amazon Redshift output plugin for Fluentd
+ test_files:
+ - test/plugin/test_out_redshift.rb
+ - test/test_helper.rb
+ has_rdoc: false
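
The dependency block in this metadata is the YAML serialization produced by RubyGems at build time; expressed back in gemspec form, the same constraints would read roughly as below (a reconstruction from the metadata above, not the gem's actual fluent-plugin-hekk_redshift.gemspec):

    Gem::Specification.new do |s|
      s.name    = 'fluent-plugin-hekk_redshift'
      s.version = '0.0.4'
      s.summary = 'Amazon Redshift output plugin for Fluentd'

      # runtime dependencies, as listed in the metadata
      s.add_runtime_dependency 'fluentd', '~> 0.10.0'
      s.add_runtime_dependency 'aws-sdk', '>= 1.6.3'
      s.add_runtime_dependency 'pg', '~> 0.16.0'

      # development dependencies
      s.add_development_dependency 'rake'
      s.add_development_dependency 'simplecov', '>= 0.5.4'
      s.add_development_dependency 'flexmock', '>= 1.3.1'
    end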