fluent-plugin-redshift-kwarter 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 2da95d008425386ffe13ffcbdaf19b2b78982853
+   data.tar.gz: 0729d2617cbeac1f7970c0c56aae1a65502b1e8f
+ SHA512:
+   metadata.gz: c5bf1ffc8bfbd8dc5ec9296e817d7f0ff559d797648d5443a2d2024694114546af5d5896f8bafaf8b97dc7ddfa4bcd20f984656c4ef0eef0b55f4e2527f1254f
+   data.tar.gz: 9f1875e617ac4b5cad4b0e13abf9bcb720c8af9c65a9f1250592f56c8bcde7caa31cfcdd0136b6016688de7bc90be72c6716e33a913c4f5aea60daf7f3d13bd7
data/.gitignore ADDED
@@ -0,0 +1,19 @@
+ *.gem
+ *.rbc
+ *.swp
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
+ vendor/
data/Gemfile ADDED
@@ -0,0 +1,3 @@
+ source 'https://rubygems.org'
+
+ gemspec
data/README.md ADDED
@@ -0,0 +1,154 @@
+ Amazon Redshift output plugin for Fluentd
+ ========
+
+ ## Overview
+
+ The Amazon Redshift output plugin uploads event logs to an Amazon Redshift cluster. Supported data formats are csv, tsv, json and msgpack. An S3 bucket and a Redshift cluster are required to use this plugin.
+
+ ## Installation
+
+     gem install fluent-plugin-redshift-kwarter
+
+ ## Configuration
+
+ Format:
+
+     <match my.tag>
+       type redshift
+
+       # s3 (for copying data to redshift)
+       aws_key_id YOUR_AWS_KEY_ID
+       aws_sec_key YOUR_AWS_SECRET_KEY
+       s3_bucket YOUR_S3_BUCKET
+       s3_endpoint YOUR_S3_BUCKET_END_POINT
+       path YOUR_S3_PATH
+       timestamp_key_format year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M
+
+       # redshift
+       redshift_host YOUR_AMAZON_REDSHIFT_CLUSTER_END_POINT
+       redshift_port YOUR_AMAZON_REDSHIFT_CLUSTER_PORT
+       redshift_dbname YOUR_AMAZON_REDSHIFT_CLUSTER_DATABASE_NAME
+       redshift_user YOUR_AMAZON_REDSHIFT_CLUSTER_USER_NAME
+       redshift_password YOUR_AMAZON_REDSHIFT_CLUSTER_PASSWORD
+       redshift_schemaname YOUR_AMAZON_REDSHIFT_CLUSTER_TARGET_SCHEMA_NAME
+       redshift_tablename YOUR_AMAZON_REDSHIFT_CLUSTER_TARGET_TABLE_NAME
+       file_type [tsv|csv|json|msgpack]
+
+       # buffer
+       buffer_type file
+       buffer_path /var/log/fluent/redshift
+       flush_interval 15m
+       buffer_chunk_limit 1g
+     </match>
+
+ Example (watch and upload json formatted apache log):
+
+     <source>
+       type tail
+       path redshift_test.json
+       pos_file redshift_test_json.pos
+       tag redshift.json
+       format /^(?<log>.*)$/
+     </source>
+
+     <match redshift.json>
+       type redshift
+
+       # s3 (for copying data to redshift)
+       aws_key_id YOUR_AWS_KEY_ID
+       aws_sec_key YOUR_AWS_SECRET_KEY
+       s3_bucket hapyrus-example
+       s3_endpoint s3.amazonaws.com
+       path path/on/s3/apache_json_log/
+       timestamp_key_format year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M
+
+       # redshift
+       redshift_host xxx-yyy-zzz.xxxxxxxxxx.us-east-1.redshift.amazonaws.com
+       redshift_port 5439
+       redshift_dbname fluent-redshift-test
+       redshift_user fluent
+       redshift_password fluent-password
+       redshift_tablename apache_log
+       file_type json
+
+       # buffer
+       buffer_type file
+       buffer_path /var/log/fluent/redshift
+       flush_interval 15m
+       buffer_chunk_limit 1g
+     </match>
+
+ + `type` (required) : The value must be `redshift`.
+
+ + `aws_key_id` (required) : AWS access key id to access the s3 bucket.
+
+ + `aws_sec_key` (required) : AWS secret access key to access the s3 bucket.
+
+ + `s3_bucket` (required) : s3 bucket name. The s3 bucket must be in the same region as your Redshift cluster.
+
+ + `s3_endpoint` : s3 endpoint.
+
+ + `path` (required) : s3 path prefix for the uploaded objects.
+
+ + `timestamp_key_format` : The format of the object keys. It can include date-format directives.
+
+   - The default value is "year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M".
+   - For example, with the example configuration above, the resulting s3 paths look like:
+     <pre>
+     hapyrus-example/path/on/s3/apache_json_log/year=2013/month=03/day=05/hour=12/20130305-1215_00.gz
+     hapyrus-example/path/on/s3/apache_json_log/year=2013/month=03/day=05/hour=12/20130305-1230_00.gz
+     </pre>
+
+ + `redshift_host` (required) : the endpoint (or hostname) of your Amazon Redshift cluster.
+
+ + `redshift_port` (required) : port number.
+
+ + `redshift_dbname` (required) : database name.
+
+ + `redshift_user` (required) : user name.
+
+ + `redshift_password` (required) : password for the user name.
+
+ + `redshift_tablename` (required) : table name to store data.
+
+ + `redshift_schemaname` : schema name to store data. By default, this option is not set and the table is located through your search_path without a schema qualifier.
+
+ + `file_type` : file format of the source data. `csv`, `tsv`, `msgpack` or `json` are available.
+
+ + `delimiter` : delimiter of the source data. If not set, it is derived from `file_type`.
+
+ + `buffer_type` : buffer type.
+
+ + `buffer_path` : path prefix of the files to buffer logs.
+
+ + `flush_interval` : flush interval.
+
+ + `buffer_chunk_limit` : size limit of each buffer chunk.
+
+ + `utc` : use UTC when formatting `timestamp_key_format` (local time is used by default).
+
131
+ ```ruby
132
+ # examples by fluent-logger
133
+ require 'fluent-logger'
134
+ log = Fluent::Logger::FluentLogger.new(nil, :host => 'localhost', :port => 24224)
135
+
136
+ # file_type: csv
137
+ log.post('your.tag', :log => "12345,12345")
138
+
139
+ # file_type: tsv
140
+ log.post('your.tag', :log => "12345\t12345")
141
+
142
+ # file_type: json
143
+ require 'json'
144
+ log.post('your.tag', :log => { :user_id => 12345, :data_id => 12345 }.to_json)
145
+
146
+ # file_type: msgpack
147
+ log.post('your.tag', :user_id => 12345, :data_id => 12345)
148
+ ```
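Note: for `csv`, `tsv` and `json` the plugin reads each record's payload from its `log` key; the key name is governed by the plugin's `record_log_tag` parameter (default `log`). `msgpack` records are wrapped under that key automatically, which is why the msgpack example above posts the fields directly.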
+
+ ## License
+
+ Copyright (c) 2013 [Hapyrus Inc](http://hapyrus.com)
+
+ [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0)
data/Rakefile ADDED
@@ -0,0 +1,16 @@
+ require "bundler"
+ Bundler::GemHelper.install_tasks
+ require 'rake/testtask'
+
+ Rake::TestTask.new(:test) do |test|
+   test.libs << 'lib' << 'test'
+   test.test_files = FileList['test/plugin/*.rb']
+   test.verbose = true
+ end
+
+ task :coverage do |t|
+   ENV['COVERAGE'] = '1'
+   Rake::Task["test"].invoke
+ end
+
+ task :default => [:build]
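With these tasks defined, `rake test` runs everything under test/plugin, and the default task builds the gem.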
data/VERSION ADDED
@@ -0,0 +1 @@
+ 0.0.3
data/fluent-plugin-redshift-kwarter.gemspec ADDED
@@ -0,0 +1,25 @@
+ # -*- encoding: utf-8 -*-
+ $:.push File.expand_path('../lib', __FILE__)
+
+ Gem::Specification.new do |gem|
+   gem.name          = "fluent-plugin-redshift-kwarter"
+   gem.version       = File.read("VERSION").strip
+   gem.authors       = ["Masashi Miyazaki", "Daniel Koepke"]
+   gem.email         = ["mmasashi@gmail.com", "daniel.koepke@kwarter.com"]
+   gem.description   = %q{Amazon Redshift output plugin for Fluentd (updated by Kwarter)}
+   gem.summary       = gem.description
+   gem.homepage      = "https://github.com/kwarter/fluent-plugin-redshift"
+   gem.has_rdoc      = false
+
+   gem.files         = `git ls-files`.split($/)
+   gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+   gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
+   gem.require_paths = ["lib"]
+
+   gem.add_dependency "fluentd", "~> 0.10.0"
+   gem.add_dependency "aws-sdk", ">= 1.6.3"
+   gem.add_dependency "pg", "~> 0.14.0"
+   gem.add_development_dependency "rake"
+   gem.add_development_dependency "simplecov", ">= 0.5.4"
+   gem.add_development_dependency "flexmock", ">= 1.3.1"
+ end
data/lib/fluent/plugin/out_redshift.rb ADDED
@@ -0,0 +1,288 @@
+ module Fluent
+
+
+   class RedshiftOutput < BufferedOutput
+     Fluent::Plugin.register_output('redshift', self)
+
+     # ignore load table error. (invalid data format)
+     IGNORE_REDSHIFT_ERROR_REGEXP = /^ERROR: Load into table '[^']+' failed\./
+
+     def initialize
+       super
+       require 'aws-sdk'
+       require 'zlib'
+       require 'time'
+       require 'tempfile'
+       require 'pg'
+       require 'json'
+       require 'csv'
+     end
+
+     config_param :record_log_tag, :string, :default => 'log'
+     # s3
+     config_param :aws_key_id, :string
+     config_param :aws_sec_key, :string
+     config_param :s3_bucket, :string
+     config_param :s3_endpoint, :string, :default => nil
+     config_param :path, :string, :default => ""
+     config_param :timestamp_key_format, :string, :default => 'year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M'
+     config_param :utc, :bool, :default => false
+     # redshift
+     config_param :redshift_host, :string
+     config_param :redshift_port, :integer, :default => 5439
+     config_param :redshift_dbname, :string
+     config_param :redshift_user, :string
+     config_param :redshift_password, :string
+     config_param :redshift_tablename, :string
+     config_param :redshift_schemaname, :string, :default => nil
+     config_param :redshift_copy_base_options, :string, :default => "FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS"
+     config_param :redshift_copy_options, :string, :default => nil
+     # file format
+     config_param :file_type, :string, :default => nil # json, tsv, csv, msgpack
+     config_param :delimiter, :string, :default => nil
+     # for debug
+     config_param :log_suffix, :string, :default => ''
+
+     def configure(conf)
+       super
+       @path = "#{@path}/" unless @path.end_with?('/') # append last slash
+       @path = @path[1..-1] if @path.start_with?('/')  # remove head slash
+       @utc = true if conf['utc']
+       @db_conf = {
+         host: @redshift_host,
+         port: @redshift_port,
+         dbname: @redshift_dbname,
+         user: @redshift_user,
+         password: @redshift_password
+       }
+       @delimiter = determine_delimiter(@file_type) if @delimiter.nil? or @delimiter.empty?
+       $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
+       @copy_sql_template = "copy #{table_name_with_schema} from '%s' CREDENTIALS 'aws_access_key_id=#{@aws_key_id};aws_secret_access_key=%s' delimiter '#{@delimiter}' GZIP ESCAPE #{@redshift_copy_base_options} #{@redshift_copy_options};"
+     end
+
+     def start
+       super
+       # init s3 conf
+       options = {
+         :access_key_id => @aws_key_id,
+         :secret_access_key => @aws_sec_key
+       }
+       options[:s3_endpoint] = @s3_endpoint if @s3_endpoint
+       @s3 = AWS::S3.new(options)
+       @bucket = @s3.buckets[@s3_bucket]
+     end
+
+     def format(tag, time, record)
+       if json?
+         record.to_msgpack
+       elsif msgpack?
+         { @record_log_tag => record }.to_msgpack
+       else
+         "#{record[@record_log_tag]}\n"
+       end
+     end
+
+     def write(chunk)
+       $log.debug format_log("start creating gz.")
+
+       # create a gz file
+       tmp = Tempfile.new("s3-")
+       tmp =
+         if json? || msgpack?
+           create_gz_file_from_structured_data(tmp, chunk, @delimiter)
+         else
+           create_gz_file_from_flat_data(tmp, chunk)
+         end
+
+       # no data -> skip
+       unless tmp
+         $log.debug format_log("received no valid data. ")
+         return false # for debug
+       end
+
+       # create a file path with time format
+       s3path = create_s3path(@bucket, @path)
+
+       # upload gz to s3
+       @bucket.objects[s3path].write(Pathname.new(tmp.path),
+                                     :acl => :bucket_owner_full_control)
+
+       # close temp file
+       tmp.close!
+
+       # copy gz on s3 to redshift
+       s3_uri = "s3://#{@s3_bucket}/#{s3path}"
+       sql = @copy_sql_template % [s3_uri, @aws_sec_key]
+       $log.debug format_log("start copying. s3_uri=#{s3_uri}")
+       conn = nil
+       begin
+         conn = PG.connect(@db_conf)
+         conn.exec(sql)
+         $log.info format_log("completed copying to redshift. s3_uri=#{s3_uri}")
+       rescue PG::Error => e
+         $log.error format_log("failed to copy data into redshift. s3_uri=#{s3_uri}"), :error => e.to_s
+         raise e unless e.to_s =~ IGNORE_REDSHIFT_ERROR_REGEXP
+         return false # for debug
+       ensure
+         conn.close rescue nil if conn
+       end
+       true # for debug
+     end
+
+     protected
+     def format_log(message)
+       (@log_suffix and not @log_suffix.empty?) ? "#{message} #{@log_suffix}" : message
+     end
+
+     private
+     def json?
+       @file_type == 'json'
+     end
+
+     def msgpack?
+       @file_type == 'msgpack'
+     end
+
+     def create_gz_file_from_flat_data(dst_file, chunk)
+       gzw = nil
+       begin
+         gzw = Zlib::GzipWriter.new(dst_file)
+         chunk.write_to(gzw)
+       ensure
+         gzw.close rescue nil if gzw
+       end
+       dst_file
+     end
+
+     def create_gz_file_from_structured_data(dst_file, chunk, delimiter)
+       # fetch the table definition from redshift
+       redshift_table_columns = fetch_table_columns
+       if redshift_table_columns == nil
+         raise "failed to fetch the redshift table definition."
+       elsif redshift_table_columns.empty?
+         $log.warn format_log("no table on redshift. table_name=#{table_name_with_schema}")
+         return nil
+       end
+
+       # convert json to tsv format text
+       gzw = nil
+       begin
+         gzw = Zlib::GzipWriter.new(dst_file)
+         chunk.msgpack_each do |record|
+           begin
+             hash = json? ? json_to_hash(record[@record_log_tag]) : record[@record_log_tag]
+             tsv_text = hash_to_table_text(redshift_table_columns, hash, delimiter)
+             gzw.write(tsv_text) if tsv_text and not tsv_text.empty?
+           rescue => e
+             if json?
+               $log.error format_log("failed to create table text from json. text=(#{record[@record_log_tag]})"), :error => $!.to_s
+             else
+               $log.error format_log("failed to create table text from msgpack. text=(#{record[@record_log_tag]})"), :error => $!.to_s
+             end
+
+             $log.error_backtrace
+           end
+         end
+         return nil unless gzw.pos > 0
+       ensure
+         gzw.close rescue nil if gzw
+       end
+       dst_file
+     end
+
+     def determine_delimiter(file_type)
+       case file_type
+       when 'json', 'msgpack', 'tsv'
+         "\t"
+       when 'csv'
+         ','
+       else
+         raise Fluent::ConfigError, "Invalid file_type:#{file_type}."
+       end
+     end
+
+     def fetch_table_columns
+       conn = PG.connect(@db_conf)
+       begin
+         columns = nil
+         conn.exec(fetch_columns_sql_with_schema) do |result|
+           columns = result.collect{|row| row['column_name']}
+         end
+         columns
+       ensure
+         conn.close rescue nil
+       end
+     end
+
+     def fetch_columns_sql_with_schema
+       @fetch_columns_sql ||= if @redshift_schemaname
+                                "select column_name from INFORMATION_SCHEMA.COLUMNS where table_schema = '#{@redshift_schemaname}' and table_name = '#{@redshift_tablename}' order by ordinal_position;"
+                              else
+                                "select column_name from INFORMATION_SCHEMA.COLUMNS where table_name = '#{@redshift_tablename}' order by ordinal_position;"
+                              end
+     end
+
+     def json_to_hash(json_text)
+       return nil if json_text.to_s.empty?
+
+       JSON.parse(json_text)
+     rescue => e
+       $log.warn format_log("failed to parse json. "), :error => e.to_s
+     end
+
+     def hash_to_table_text(redshift_table_columns, hash, delimiter)
+       return "" unless hash
+
+       # extract values from hash
+       val_list = redshift_table_columns.collect do |cn|
+         val = hash[cn]
+         val = JSON.generate(val) if val.kind_of?(Hash) or val.kind_of?(Array)
+
+         if val.to_s.empty?
+           nil
+         else
+           val.to_s
+         end
+       end
+
+       if val_list.all?{|v| v.nil? or v.empty?}
+         $log.warn format_log("no data match for table columns on redshift. data=#{hash} table_columns=#{redshift_table_columns}")
+         return ""
+       end
+
+       generate_line_with_delimiter(val_list, delimiter)
+     end
+
+     def generate_line_with_delimiter(val_list, delimiter)
+       val_list = val_list.collect do |val|
+         if val.nil? or val.empty?
+           ""
+         else
+           val.gsub(/\\/, "\\\\\\").gsub(/\t/, "\\\t").gsub(/\n/, "\\\n") # escape tab, newline and backslash
+         end
+       end
+       val_list.join(delimiter) + "\n"
+     end
+
+     def create_s3path(bucket, path)
+       timestamp_key = (@utc) ? Time.now.utc.strftime(@timestamp_key_format) : Time.now.strftime(@timestamp_key_format)
+       i = 0
+       begin
+         suffix = "_#{'%02d' % i}"
+         s3path = "#{path}#{timestamp_key}#{suffix}.gz"
+         i += 1
+       end while bucket.objects[s3path].exists?
+       s3path
+     end
+
+     def table_name_with_schema
+       @table_name_with_schema ||= if @redshift_schemaname
+                                     "#{@redshift_schemaname}.#{@redshift_tablename}"
+                                   else
+                                     @redshift_tablename
+                                   end
+     end
+   end
+
+
+ end
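To make the write path concrete, here is a hypothetical rendering (plain Ruby, not part of the package) of the COPY statement that `configure` templates and `write` completes. The table name, object key and credentials below are placeholders, and `redshift_copy_options` is left empty as in the default configuration:

```ruby
# Placeholder values standing in for real configuration and credentials.
table     = 'test_schema.apache_log'
delimiter = "\t" # a literal tab is interpolated into the SQL, as in configure
s3_uri    = 's3://hapyrus-example/path/on/s3/apache_json_log/year=2013/month=03/day=05/hour=12/20130305-1215_00.gz'

# Mirrors @copy_sql_template: the s3 URI and secret key are filled in by write.
template = "copy #{table} from '%s' CREDENTIALS " \
           "'aws_access_key_id=YOUR_AWS_KEY_ID;aws_secret_access_key=%s' " \
           "delimiter '#{delimiter}' GZIP ESCAPE FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS;"
puts template % [s3_uri, 'YOUR_AWS_SECRET_KEY']
```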
data/test/plugin/test_out_redshift.rb ADDED
@@ -0,0 +1,526 @@
+ require 'test_helper'
+
+ require 'fluent/test'
+ require 'fluent/plugin/out_redshift'
+ require 'flexmock/test_unit'
+ require 'zlib'
+
+
+ class RedshiftOutputTest < Test::Unit::TestCase
+   def setup
+     require 'aws-sdk'
+     require 'pg'
+     require 'csv'
+     Fluent::Test.setup
+   end
+
+   CONFIG_BASE = %[
+     aws_key_id test_key_id
+     aws_sec_key test_sec_key
+     s3_bucket test_bucket
+     path log
+     redshift_host test_host
+     redshift_dbname test_db
+     redshift_user test_user
+     redshift_password test_password
+     redshift_tablename test_table
+     buffer_type memory
+     utc
+     log_suffix id:5 host:localhost
+   ]
+   CONFIG_CSV = %[
+     #{CONFIG_BASE}
+     file_type csv
+   ]
+   CONFIG_TSV = %[
+     #{CONFIG_BASE}
+     file_type tsv
+   ]
+   CONFIG_JSON = %[
+     #{CONFIG_BASE}
+     file_type json
+   ]
+   CONFIG_JSON_WITH_SCHEMA = %[
+     #{CONFIG_BASE}
+     redshift_schemaname test_schema
+     file_type json
+   ]
+   CONFIG_MSGPACK = %[
+     #{CONFIG_BASE}
+     file_type msgpack
+   ]
+   CONFIG_PIPE_DELIMITER = %[
+     #{CONFIG_BASE}
+     delimiter |
+   ]
+   CONFIG_PIPE_DELIMITER_WITH_NAME = %[
+     #{CONFIG_BASE}
+     file_type pipe
+     delimiter |
+   ]
+   CONFIG = CONFIG_CSV
+
+   RECORD_CSV_A = {"log" => %[val_a,val_b,val_c,val_d]}
+   RECORD_CSV_B = {"log" => %[val_e,val_f,val_g,val_h]}
+   RECORD_TSV_A = {"log" => %[val_a\tval_b\tval_c\tval_d]}
+   RECORD_TSV_B = {"log" => %[val_e\tval_f\tval_g\tval_h]}
+   RECORD_JSON_A = {"log" => %[{"key_a" : "val_a", "key_b" : "val_b"}]}
+   RECORD_JSON_B = {"log" => %[{"key_c" : "val_c", "key_d" : "val_d"}]}
+   RECORD_MSGPACK_A = {"key_a" => "val_a", "key_b" => "val_b"}
+   RECORD_MSGPACK_B = {"key_c" => "val_c", "key_d" => "val_d"}
+   DEFAULT_TIME = Time.parse("2013-03-06 12:15:02 UTC").to_i
+
+   def create_driver(conf = CONFIG, tag = 'test.input')
+     Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag).configure(conf)
+   end
+
+   def create_driver_no_write(conf = CONFIG, tag = 'test.input')
+     Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag) do
+       def write(chunk)
+         chunk.read
+       end
+     end.configure(conf)
+   end
+
+   def test_configure
+     assert_raise(Fluent::ConfigError) {
+       d = create_driver('')
+     }
+     assert_raise(Fluent::ConfigError) {
+       d = create_driver(CONFIG_BASE)
+     }
+     d = create_driver(CONFIG_CSV)
+     assert_equal "test_key_id", d.instance.aws_key_id
+     assert_equal "test_sec_key", d.instance.aws_sec_key
+     assert_equal "test_bucket", d.instance.s3_bucket
+     assert_equal "log/", d.instance.path
+     assert_equal "test_host", d.instance.redshift_host
+     assert_equal 5439, d.instance.redshift_port
+     assert_equal "test_db", d.instance.redshift_dbname
+     assert_equal "test_user", d.instance.redshift_user
+     assert_equal "test_password", d.instance.redshift_password
+     assert_equal "test_table", d.instance.redshift_tablename
+     assert_equal nil, d.instance.redshift_schemaname
+     assert_equal "FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS", d.instance.redshift_copy_base_options
+     assert_equal nil, d.instance.redshift_copy_options
+     assert_equal "csv", d.instance.file_type
+     assert_equal ",", d.instance.delimiter
+     assert_equal true, d.instance.utc
+   end
+   def test_configure_with_schemaname
+     d = create_driver(CONFIG_JSON_WITH_SCHEMA)
+     assert_equal "test_schema", d.instance.redshift_schemaname
+   end
+   def test_configure_localtime
+     d = create_driver(CONFIG_CSV.gsub(/ *utc */, ''))
+     assert_equal false, d.instance.utc
+   end
+   def test_configure_no_path
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, ''))
+     assert_equal "", d.instance.path
+   end
+   def test_configure_root_path
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /'))
+     assert_equal "", d.instance.path
+   end
+   def test_configure_path_with_slash
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path log/'))
+     assert_equal "log/", d.instance.path
+   end
+   def test_configure_path_starts_with_slash
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log/'))
+     assert_equal "log/", d.instance.path
+   end
+   def test_configure_path_starts_with_slash_without_last_slash
+     d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log'))
+     assert_equal "log/", d.instance.path
+   end
+   def test_configure_tsv
+     d1 = create_driver(CONFIG_TSV)
+     assert_equal "tsv", d1.instance.file_type
+     assert_equal "\t", d1.instance.delimiter
+   end
+   def test_configure_json
+     d2 = create_driver(CONFIG_JSON)
+     assert_equal "json", d2.instance.file_type
+     assert_equal "\t", d2.instance.delimiter
+   end
+   def test_configure_msgpack
+     d2 = create_driver(CONFIG_MSGPACK)
+     assert_equal "msgpack", d2.instance.file_type
+     assert_equal "\t", d2.instance.delimiter
+   end
+   def test_configure_original_file_type
+     d3 = create_driver(CONFIG_PIPE_DELIMITER)
+     assert_equal nil, d3.instance.file_type
+     assert_equal "|", d3.instance.delimiter
+
+     d4 = create_driver(CONFIG_PIPE_DELIMITER_WITH_NAME)
+     assert_equal "pipe", d4.instance.file_type
+     assert_equal "|", d4.instance.delimiter
+   end
+   def test_configure_no_log_suffix
+     d = create_driver(CONFIG_CSV.gsub(/ *log_suffix *.+$/, ''))
+     assert_equal "", d.instance.log_suffix
+   end
+
+   def emit_csv(d)
+     d.emit(RECORD_CSV_A, DEFAULT_TIME)
+     d.emit(RECORD_CSV_B, DEFAULT_TIME)
+   end
+   def emit_tsv(d)
+     d.emit(RECORD_TSV_A, DEFAULT_TIME)
+     d.emit(RECORD_TSV_B, DEFAULT_TIME)
+   end
+   def emit_json(d)
+     d.emit(RECORD_JSON_A, DEFAULT_TIME)
+     d.emit(RECORD_JSON_B, DEFAULT_TIME)
+   end
+   def emit_msgpack(d)
+     d.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
+     d.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+   end
+
+   def test_format_csv
+     d_csv = create_driver_no_write(CONFIG_CSV)
+     emit_csv(d_csv)
+     d_csv.expect_format RECORD_CSV_A['log'] + "\n"
+     d_csv.expect_format RECORD_CSV_B['log'] + "\n"
+     d_csv.run
+   end
+   def test_format_tsv
+     d_tsv = create_driver_no_write(CONFIG_TSV)
+     emit_tsv(d_tsv)
+     d_tsv.expect_format RECORD_TSV_A['log'] + "\n"
+     d_tsv.expect_format RECORD_TSV_B['log'] + "\n"
+     d_tsv.run
+   end
+   def test_format_json
+     d_json = create_driver_no_write(CONFIG_JSON)
+     emit_json(d_json)
+     d_json.expect_format RECORD_JSON_A.to_msgpack
+     d_json.expect_format RECORD_JSON_B.to_msgpack
+     d_json.run
+   end
+
+   def test_format_msgpack
+     d_msgpack = create_driver_no_write(CONFIG_MSGPACK)
+     emit_msgpack(d_msgpack)
+     d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_A }.to_msgpack)
+     d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_B }.to_msgpack)
+     d_msgpack.run
+   end
+
+   class PGConnectionMock
+     def initialize(options = {})
+       @return_keys = options[:return_keys] || ['key_a', 'key_b', 'key_c', 'key_d', 'key_e', 'key_f', 'key_g', 'key_h']
+       @target_schema = options[:schemaname] || nil
+       @target_table = options[:tablename] || 'test_table'
+     end
+
+     def expected_column_list_query
+       if @target_schema
+         /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_schema = '#{@target_schema}' and table_name = '#{@target_table}'/
+       else
+         /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_name = '#{@target_table}'/
+       end
+     end
+
+     def expected_copy_query
+       if @target_schema
+         /\Acopy #{@target_schema}.#{@target_table} from/
+       else
+         /\Acopy #{@target_table} from/
+       end
+     end
+
+     def exec(sql, &block)
+       if block_given?
+         if sql =~ expected_column_list_query
+           yield @return_keys.collect{|key| {'column_name' => key}}
+         else
+           yield []
+         end
+       else
+         unless sql =~ expected_copy_query
+           error = PG::Error.new("ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details.")
+           error.result = "ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details."
+           raise error
+         end
+       end
+     end
+
+     def close
+     end
+   end
+
+   def setup_pg_mock
+     # create mock of PG
+     def PG.connect(dbinfo)
+       return PGConnectionMock.new
+     end
+   end
+
+   def setup_s3_mock(expected_data)
+     current_time = Time.now
+
+     # create mock of s3 object
+     s3obj = flexmock(AWS::S3::S3Object)
+     s3obj.should_receive(:exists?).with_any_args.and_return { false }
+     s3obj.should_receive(:write).with(
+       # pathname
+       on { |pathname|
+         data = nil
+         pathname.open { |f|
+           gz = Zlib::GzipReader.new(f)
+           data = gz.read
+           gz.close
+         }
+         assert_equal expected_data, data
+       },
+       :acl => :bucket_owner_full_control
+     ).and_return { true }
+
+     # create mock of s3 object collection
+     s3obj_col = flexmock(AWS::S3::ObjectCollection)
+     s3obj_col.should_receive(:[]).with(
+       on { |key|
+         expected_key = current_time.utc.strftime("log/year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M_00.gz")
+         key == expected_key
+       }).
+       and_return {
+         s3obj
+       }
+
+     # create mock of s3 bucket
+     flexmock(AWS::S3::Bucket).new_instances do |bucket|
+       bucket.should_receive(:objects).with_any_args.
+         and_return {
+           s3obj_col
+         }
+     end
+   end
+
+   def setup_tempfile_mock_to_be_closed
+     flexmock(Tempfile).new_instances.should_receive(:close!).at_least.once
+   end
+
+   def setup_mocks(expected_data)
+     setup_pg_mock
+     setup_s3_mock(expected_data)
+   end
+
+   def test_write_with_csv
+     setup_mocks(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+     setup_tempfile_mock_to_be_closed
+     d_csv = create_driver
+     emit_csv(d_csv)
+     assert_equal true, d_csv.run
+   end
+
+   def test_write_with_json
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
+     setup_tempfile_mock_to_be_closed
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_hash_value
+     setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : {"foo" : "var"}}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_array_value
+     setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : ["foo", "var"]}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_including_tab_newline_quote
+     setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[{"key_a" : "val_a_with_\\t_tab_\\n_newline", "key_b" : "val_b_with_\\\\_quote"}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_no_data
+     setup_mocks("")
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit("", DEFAULT_TIME)
+     d_json.emit("", DEFAULT_TIME)
+     assert_equal false, d_json.run
+   end
+
+   def test_write_with_json_invalid_one_line
+     setup_mocks(%[\t\tval_c\tval_d\t\t\t\t\n])
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit({"log" => %[}}]}, DEFAULT_TIME)
+     d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_json_no_available_data
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
+     d_json = create_driver(CONFIG_JSON)
+     d_json.emit(RECORD_JSON_A, DEFAULT_TIME)
+     d_json.emit({"log" => %[{"key_o" : "val_o", "key_p" : "val_p"}]}, DEFAULT_TIME)
+     assert_equal true, d_json.run
+   end
+
+   def test_write_with_msgpack
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     emit_msgpack(d_msgpack)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_hash_value
+     setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({"key_a" => "val_a", "key_b" => {"foo" => "var"}}, DEFAULT_TIME)
+     d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_array_value
+     setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({"key_a" => "val_a", "key_b" => ["foo", "var"]}, DEFAULT_TIME)
+     d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_including_tab_newline_quote
+     setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({"key_a" => "val_a_with_\t_tab_\n_newline", "key_b" => "val_b_with_\\_quote"}, DEFAULT_TIME)
+     d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_no_data
+     setup_mocks("")
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit({}, DEFAULT_TIME)
+     d_msgpack.emit({}, DEFAULT_TIME)
+     assert_equal false, d_msgpack.run
+   end
+
+   def test_write_with_msgpack_no_available_data
+     setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
+     d_msgpack = create_driver(CONFIG_MSGPACK)
+     d_msgpack.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
+     d_msgpack.emit({"key_o" => "val_o", "key_p" => "val_p"}, DEFAULT_TIME)
+     assert_equal true, d_msgpack.run
+   end
+
+   def test_write_redshift_connection_error
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql)
+           raise PG::Error, "redshift connection error"
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_csv = create_driver
+     emit_csv(d_csv)
+     assert_raise(PG::Error) {
+       d_csv.run
+     }
+   end
+
+   def test_write_redshift_load_error
+     PG::Error.module_eval { attr_accessor :result }
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql)
+           error = PG::Error.new("ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details.")
+           error.result = "ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details."
+           raise error
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_csv = create_driver
+     emit_csv(d_csv)
+     assert_equal false, d_csv.run
+   end
+
+   def test_write_with_json_redshift_connection_error
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql, &block)
+           error = PG::Error.new("redshift connection error")
+           raise error
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_raise(PG::Error) {
+       d_json.run
+     }
+   end
+
+   def test_write_with_json_no_table_on_redshift
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql, &block)
+           yield [] if block_given?
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
+
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_equal false, d_json.run
+   end
+
+   def test_write_with_json_failed_to_get_columns
+     def PG.connect(dbinfo)
+       return Class.new do
+         def initialize(return_keys=[]); end
+         def exec(sql, &block)
+         end
+         def close; end
+       end.new
+     end
+     setup_s3_mock("")
+
+     d_json = create_driver(CONFIG_JSON)
+     emit_json(d_json)
+     assert_raise(RuntimeError, "failed to fetch the redshift table definition.") {
+       d_json.run
+     }
+   end
+
+   def test_write_with_json_fetch_column_with_schema
+     def PG.connect(dbinfo)
+       return PGConnectionMock.new(:schemaname => 'test_schema')
+     end
+     setup_s3_mock(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
+     d_json = create_driver(CONFIG_JSON_WITH_SCHEMA)
+     emit_json(d_json)
+     assert_equal true, d_json.run
+   end
+ end
data/test/test_helper.rb ADDED
@@ -0,0 +1,8 @@
+ if ENV['COVERAGE']
+   require 'simplecov'
+   SimpleCov.start do
+     add_filter 'test/'
+     add_filter 'pkg/'
+     add_filter 'vendor/'
+   end
+ end
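The `COVERAGE` guard here is what the Rakefile's `coverage` task toggles: that task sets `ENV['COVERAGE'] = '1'` before invoking the test task, so SimpleCov starts only on coverage runs.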
metadata ADDED
@@ -0,0 +1,140 @@
+ --- !ruby/object:Gem::Specification
+ name: fluent-plugin-redshift-kwarter
+ version: !ruby/object:Gem::Version
+   version: 0.0.3
+ platform: ruby
+ authors:
+ - Masashi Miyazaki
+ - Daniel Koepke
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2013-10-17 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: fluentd
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.10.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.10.0
+ - !ruby/object:Gem::Dependency
+   name: aws-sdk
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: 1.6.3
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: 1.6.3
+ - !ruby/object:Gem::Dependency
+   name: pg
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.14.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.14.0
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: simplecov
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: 0.5.4
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: 0.5.4
+ - !ruby/object:Gem::Dependency
+   name: flexmock
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: 1.3.1
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: 1.3.1
+ description: Amazon Redshift output plugin for Fluentd (updated by Kwarter)
+ email:
+ - mmasashi@gmail.com
+ - daniel.koepke@kwarter.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - README.md
+ - Rakefile
+ - VERSION
+ - fluent-plugin-redshift-kwarter.gemspec
+ - lib/fluent/plugin/out_redshift.rb
+ - test/plugin/test_out_redshift.rb
+ - test/test_helper.rb
+ homepage: https://github.com/kwarter/fluent-plugin-redshift
+ licenses: []
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.1.9
+ signing_key:
+ specification_version: 4
+ summary: Amazon Redshift output plugin for Fluentd (updated by Kwarter)
+ test_files:
+ - test/plugin/test_out_redshift.rb
+ - test/test_helper.rb