fluent-plugin-redshift-auto 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/.gitignore ADDED
@@ -0,0 +1,19 @@
1
+ *.gem
2
+ *.rbc
3
+ *.swp
4
+ .bundle
5
+ .config
6
+ .yardoc
7
+ Gemfile.lock
8
+ InstalledFiles
9
+ _yardoc
10
+ coverage
11
+ doc/
12
+ lib/bundler/man
13
+ pkg
14
+ rdoc
15
+ spec/reports
16
+ test/tmp
17
+ test/version_tmp
18
+ tmp
19
+ vendor/
data/Gemfile ADDED
@@ -0,0 +1,3 @@
1
+ source 'https://rubygems.org'
2
+
3
+ gemspec
data/README.md ADDED
@@ -0,0 +1,156 @@
1
+ Amazon Redshift output plugin for Fluentd
2
+ ========
3
+
4
+ ## Overview
5
+
6
+ Amazon Redshift output plugin uploads event logs to an Amazon Redshift cluster. Supported data formats are csv, tsv, json and msgpack. An S3 bucket and a Redshift cluster are required to use this plugin. Unlike the upstream fluent-plugin-redshift, this fork can also create a table (named after the incoming tag) from the record keys when `file_type` is `json`; see `varchar_length`.
7
+
8
+ ## Installation
9
+
10
+ gem install fluent-plugin-redshift-auto
11
+
12
+ ## Configuration
13
+
14
+ Format:
15
+
16
+ <match my.tag>
17
+ type redshift_auto
18
+
19
+ # s3 (for copying data to redshift)
20
+ aws_key_id YOUR_AWS_KEY_ID
21
+ aws_sec_key YOUR_AWS_SECRET_KEY
22
+ s3_bucket YOUR_S3_BUCKET
23
+ s3_endpoint YOUR_S3_BUCKET_END_POINT
24
+ path YOUR_S3_PATH
25
+ timestamp_key_format year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M
26
+
27
+ # redshift
28
+ redshift_host YOUR_AMAZON_REDSHIFT_CLUSTER_END_POINT
29
+ redshift_port YOUR_AMAZON_REDSHIFT_CLUSTER_PORT
30
+ redshift_dbname YOUR_AMAZON_REDSHIFT_CLUSTER_DATABASE_NAME
31
+ redshift_user YOUR_AMAZON_REDSHIFT_CLUSTER_USER_NAME
32
+ redshift_password YOUR_AMAZON_REDSHIFT_CLUSTER_PASSWORD
33
+ redshift_schemaname YOUR_AMAZON_REDSHIFT_CLUSTER_TARGET_SCHEMA_NAME
34
+ redshift_tablename YOUR_AMAZON_REDSHIFT_CLUSTER_TARGET_TABLE_NAME
35
+ file_type [tsv|csv|json|msgpack]
36
+ varchar_length ALL_COLUMNS_VARCHAR_LENGTH
37
+
38
+
39
+ # buffer
40
+ buffer_type file
41
+ buffer_path /var/log/fluent/redshift
42
+ flush_interval 15m
43
+ buffer_chunk_limit 1g
44
+ </match>
45
+
46
+ Example (watch and upload json formatted apache log):
47
+
48
+ <source>
49
+ type tail
50
+ path redshift_test.json
51
+ pos_file redshift_test_json.pos
52
+ tag redshift.json
53
+ format /^(?<log>.*)$/
54
+ </source>
55
+
56
+ <match redshift.json>
57
+ type redshift_auto
58
+
59
+ # s3 (for copying data to redshift)
60
+ aws_key_id YOUR_AWS_KEY_ID
61
+ aws_sec_key YOUR_AWS_SECRET_KEY
62
+ s3_bucket hapyrus-example
63
+ s3_endpoint s3.amazonaws.com
64
+ path path/on/s3/apache_json_log/
65
+ timestamp_key_format year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M
66
+
67
+ # redshift
68
+ redshift_host xxx-yyy-zzz.xxxxxxxxxx.us-east-1.redshift.amazonaws.com
69
+ redshift_port 5439
70
+ redshift_dbname fluent-redshift-test
71
+ redshift_user fluent
72
+ redshift_password fluent-password
73
+ redshift_tablename apache_log
74
+ file_type json
75
+
76
+ # buffer
77
+ buffer_type file
78
+ buffer_path /var/log/fluent/redshift
79
+ flush_interval 15m
80
+ buffer_chunk_limit 1g
81
+ </match>
82
+
83
+ + `type` (required) : The value must be `redshift_auto`.
84
+
85
+ + `aws_key_id` (required) : AWS access key id to access s3 bucket.
86
+
87
+ + `aws_sec_key` (required) : AWS secret access key to access the s3 bucket.
88
+
89
+ + `s3_bucket` (required) : s3 bucket name. The S3 bucket must be in the same region as your Redshift cluster.
90
+
91
+ + `s3_endpoint` : s3 endpoint.
92
+
93
+ + `path` (required) : s3 key prefix for the uploaded objects.
94
+
95
+ + `timestamp_key_format` : The format of the object keys. It can include date-format directives.
96
+
97
+ - The default is `year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M`.
98
+ - For example, with the example configuration above the s3 object paths look like the following:
99
+ <pre>
100
+ hapyrus-example/path/on/s3/apache_json_log/year=2013/month=03/day=05/hour=12/20130305-1215_00.gz
101
+ hapyrus-example/path/on/s3/apache_json_log/year=2013/month=03/day=05/hour=12/20130305-1230_00.gz
102
+ </pre>
103
+
104
+ + `redshift_host` (required) : the endpoint (or hostname) of your Amazon Redshift cluster.
105
+
106
+ + `redshift_port` (required) : port number.
107
+
108
+ + `redshift_dbname` (required) : database name.
109
+
110
+ + `redshift_user` (required) : user name.
111
+
112
+ + `redshift_password` (required) : password for the user name.
113
+
114
+ + `redshift_tablename` (required) : table name to store data.
115
+
116
+ + `redshift_schemaname` : schema name to store data. By default this option is not set, and the table is looked up without a schema according to your search_path.
117
+
118
+ + `file_type` : file format of the source data. `csv`, `tsv`, `msgpack` or `json` are available.
119
+
120
+ + `delimiter` : delimiter of the source data. If not specified, it is derived from `file_type`.
121
+
122
+ + `buffer_type` : buffer type.
123
+
124
+ + `buffer_path` : path prefix of the files to buffer logs.
125
+
126
+ + `flush_interval` : flush interval.
127
+
128
+ + `buffer_chunk_limit` : size limit of each buffer chunk.
129
+
130
+ + `utc` : use UTC for the time zone. This parameter affects `timestamp_key_format`.
131
+
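+ A minimal sketch combining the table-creation options of this fork (all values below are placeholders):
+
+ <match my.tag>
+ type redshift_auto
+ aws_key_id YOUR_AWS_KEY_ID
+ aws_sec_key YOUR_AWS_SECRET_KEY
+ s3_bucket YOUR_S3_BUCKET
+ path logs/
+ redshift_host YOUR_CLUSTER_END_POINT
+ redshift_dbname YOUR_DATABASE
+ redshift_user YOUR_USER
+ redshift_password YOUR_PASSWORD
+ redshift_schemaname YOUR_SCHEMA
+ redshift_tablename YOUR_TABLE
+ file_type json
+ varchar_length 255
+ </match>
+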
132
+ ## Logging examples
133
+ ```ruby
134
+ # examples by fluent-logger
135
+ require 'fluent-logger'
136
+ log = Fluent::Logger::FluentLogger.new(nil, :host => 'localhost', :port => 24224)
137
+
138
+ # file_type: csv
139
+ log.post('your.tag', :log => "12345,12345")
140
+
141
+ # file_type: tsv
142
+ log.post('your.tag', :log => "12345\t12345")
143
+
144
+ # file_type: json
145
+ require 'json'
146
+ log.post('your.tag', :log => { :user_id => 12345, :data_id => 12345 }.to_json)
147
+
148
+ # file_type: msgpack
149
+ log.post('your.tag', :user_id => 12345, :data_id => 12345)
150
+ ```
151
+
152
+ ## License
153
+
154
+ Copyright (c) 2013 [Hapyrus Inc](http://hapyrus.com)
155
+
156
+ [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0)
data/Rakefile ADDED
@@ -0,0 +1,16 @@
1
+ require "bundler"
2
+ Bundler::GemHelper.install_tasks
3
+ require 'rake/testtask'
4
+
5
+ Rake::TestTask.new(:test) do |test|
6
+ test.libs << 'lib' << 'test'
7
+ test.test_files = FileList['test/plugin/*.rb']
8
+ test.verbose = true
9
+ end
10
+
11
+ task :coverage do |t|
12
+ ENV['COVERAGE'] = '1'
13
+ Rake::Task["test"].invoke
14
+ end
15
+
16
+ task :default => [:build]
data/VERSION ADDED
@@ -0,0 +1 @@
1
+ 0.0.2
data/fluent-plugin-redshift-auto.gemspec ADDED
@@ -0,0 +1,25 @@
1
+ # -*- encoding: utf-8 -*-
2
+ $:.push File.expand_path('../lib', __FILE__)
3
+
4
+ Gem::Specification.new do |gem|
5
+ gem.name = "fluent-plugin-redshift-auto"
6
+ gem.version = File.read("VERSION").strip
7
+ gem.authors = ["Takashi Honda"]
8
+ gem.email = ["takashi.0628.honda@gmail.com"]
9
+ gem.description = %q{Amazon Redshift output plugin for Fluentd with creating table}
10
+ gem.summary = gem.description
11
+ gem.homepage = "https://github.com/takashi-honda/fluent-plugin-redshift-auto"
12
+ gem.has_rdoc = false
13
+
14
+ gem.files = `git ls-files`.split($/)
15
+ gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
16
+ gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
17
+ gem.require_paths = ["lib"]
18
+
19
+ gem.add_dependency "fluentd", "~> 0.10.0"
20
+ gem.add_dependency "aws-sdk", ">= 1.6.3"
21
+ gem.add_dependency "pg", "~> 0.14.0"
22
+ gem.add_development_dependency "rake"
23
+ gem.add_development_dependency "simplecov", ">= 0.5.4"
24
+ gem.add_development_dependency "flexmock", ">= 1.3.1"
25
+ end
data/lib/fluent/plugin/out_redshift_auto.rb ADDED
@@ -0,0 +1,301 @@
1
+ module Fluent
2
+
3
+
4
+ class RedshiftOutput < BufferedOutput
5
+ Fluent::Plugin.register_output('redshift_auto', self)
6
+
7
+ # ignore load table error. (invalid data format)
8
+ IGNORE_REDSHIFT_ERROR_REGEXP = /^ERROR: Load into table '[^']+' failed\./
9
+
10
+ def initialize
11
+ super
12
+ require 'aws-sdk'
13
+ require 'zlib'
14
+ require 'time'
15
+ require 'tempfile'
16
+ require 'pg'
17
+ require 'json'
18
+ require 'csv'
19
+ end
20
+
21
+ config_param :record_log_tag, :string, :default => 'log'
22
+ # s3
23
+ config_param :aws_key_id, :string
24
+ config_param :aws_sec_key, :string
25
+ config_param :s3_bucket, :string
26
+ config_param :s3_endpoint, :string, :default => nil
27
+ config_param :path, :string, :default => ""
28
+ config_param :timestamp_key_format, :string, :default => 'year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M'
29
+ config_param :utc, :bool, :default => false
30
+ # redshift
31
+ config_param :redshift_host, :string
32
+ config_param :redshift_port, :integer, :default => 5439
33
+ config_param :redshift_dbname, :string
34
+ config_param :redshift_user, :string
35
+ config_param :redshift_password, :string
36
+ config_param :redshift_tablename, :string
+ config_param :redshift_schemaname, :string, :default => nil
37
+ # file format
38
+ config_param :file_type, :string, :default => nil # json, tsv, csv
39
+ config_param :delimiter, :string, :default => nil
40
+ # for debug
41
+ config_param :log_suffix, :string, :default => ''
42
+ # for varchar length
43
+ config_param :varchar_length, :integer, :default => 255
44
+
45
+ def configure(conf)
46
+ super
47
+ @path = "#{@path}/" if /.+[^\/]$/ =~ @path
48
+ @path = "" if @path == "/"
49
+ @utc = true if conf['utc']
50
+ @db_conf = {
51
+ host:@redshift_host,
52
+ port:@redshift_port,
53
+ dbname:@redshift_dbname,
54
+ user:@redshift_user,
55
+ password:@redshift_password
56
+ }
57
+ @delimiter = determine_delimiter(@file_type) if @delimiter.nil? or @delimiter.empty?
58
+ $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
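+ # COPY statement template; the target S3 URI and the AWS secret key are interpolated at write time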
59
+ @copy_sql_template = "copy #{@redshift_tablename} from '%s' CREDENTIALS 'aws_access_key_id=#{@aws_key_id};aws_secret_access_key=%s' delimiter '#{@delimiter}' GZIP TRUNCATECOLUMNS ESCAPE FILLRECORD ACCEPTANYDATE;"
60
+ end
61
+
62
+ def start
63
+ super
64
+ # init s3 conf
65
+ $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
66
+ options = {
67
+ :access_key_id => @aws_key_id,
68
+ :secret_access_key => @aws_sec_key
69
+ }
70
+ options[:s3_endpoint] = @s3_endpoint if @s3_endpoint
71
+ @s3 = AWS::S3.new(options)
72
+ @bucket = @s3.buckets[@s3_bucket]
73
+ end
74
+
75
+ def format(tag, time, record)
+ if json?
+ # create the target table (if it does not exist yet) from the record's keys
+ make_table_from_tag_name(tag, record.keys)
+ JSON.generate(record).to_msgpack
+ else
+ "#{record[@record_log_tag]}\n"
+ end
+ end
88
+
89
+ def write(chunk)
90
+ $log.debug format_log("start creating gz.")
91
+
92
+ # create a gz file
93
+ tmp = Tempfile.new("s3-")
94
+ tmp = (json?) ? create_gz_file_from_json(tmp, chunk, @delimiter)
95
+ : create_gz_file_from_msgpack(tmp, chunk)
96
+
97
+ # no data -> skip
98
+ unless tmp
99
+ $log.debug format_log("received no valid data. ")
100
+ return false # for debug
101
+ end
102
+
103
+ # create a file path with time format
104
+ s3path = create_s3path(@bucket, @path)
105
+
106
+ # upload gz to s3
107
+ @bucket.objects[s3path].write(Pathname.new(tmp.path),
108
+ :acl => :bucket_owner_full_control)
109
+ # copy gz on s3 to redshift
110
+ s3_uri = "s3://#{@s3_bucket}/#{s3path}"
111
+ sql = @copy_sql_template % [s3_uri, @aws_sec_key]
112
+ $log.debug format_log("start copying. s3_uri=#{s3_uri}")
113
+ conn = nil
114
+ begin
115
+ conn = PG.connect(@db_conf)
116
+ conn.exec(sql)
117
+ $log.info format_log("completed copying to redshift. s3_uri=#{s3_uri}")
118
+ rescue PG::Error => e
119
+ $log.error format_log("failed to copy data into redshift. s3_uri=#{s3_uri}"), :error=>e.to_s
120
+ raise e unless e.to_s =~ IGNORE_REDSHIFT_ERROR_REGEXP
121
+ return false # for debug
122
+ ensure
123
+ conn.close rescue nil if conn
124
+ end
125
+ true # for debug
126
+ end
127
+
128
+ protected
129
+ def format_log(message)
130
+ (@log_suffix and not @log_suffix.empty?) ? "#{message} #{@log_suffix}" : message
131
+ end
132
+
133
+ private
134
+ def json?
135
+ @file_type == 'json'
136
+ end
137
+
138
+ def create_gz_file_from_msgpack(dst_file, chunk)
139
+ gzw = nil
140
+ begin
141
+ gzw = Zlib::GzipWriter.new(dst_file)
142
+ chunk.write_to(gzw)
143
+ ensure
144
+ gzw.close rescue nil if gzw
145
+ end
146
+ dst_file
147
+ end
148
+
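+ # Convert buffered JSON records into gzip-compressed, delimiter-separated text whose column order matches the Redshift table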
149
+ def create_gz_file_from_json(dst_file, chunk, delimiter)
150
+ # fetch the table definition from redshift
151
+ redshift_table_columns = fetch_table_columns
152
+ if redshift_table_columns == nil
153
+ raise "failed to fetch the redshift table definition."
154
+ elsif redshift_table_columns.empty?
155
+ $log.warn format_log("no table on redshift. table_name=#{@redshift_tablename}")
156
+ return nil
157
+ end
158
+
159
+ # convert json to tsv format text
160
+ gzw = nil
161
+ begin
162
+ gzw = Zlib::GzipWriter.new(dst_file)
163
+ $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
164
+ chunk.msgpack_each do |record|
165
+ begin
166
+ tsv_text = json_to_table_text(redshift_table_columns, record, delimiter)
167
+ gzw.write(tsv_text) if tsv_text and not tsv_text.empty?
168
+ rescue => e
169
+ $log.error format_log("failed to create table text from json. text=(#{record[@record_log_tag]})"), :error=>$!.to_s
170
+ $log.error_backtrace
171
+ end
172
+ end
173
+ return nil unless gzw.pos > 0
174
+ ensure
175
+ gzw.close rescue nil if gzw
176
+ end
177
+ dst_file
178
+ end
179
+
180
+ def determine_delimiter(file_type)
181
+ case file_type
182
+ when 'json', 'tsv'
183
+ "\t"
184
+ when "csv"
185
+ ','
186
+ else
187
+ raise Fluent::ConfigError, "Invalid file_type:#{file_type}."
188
+ end
189
+ end
190
+
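+ # Fetch the target table's column names from INFORMATION_SCHEMA in ordinal order (empty when the table does not exist)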
191
+ def fetch_table_columns
192
+ fetch_columns_sql = "select column_name from INFORMATION_SCHEMA.COLUMNS where table_name = '#{@redshift_tablename}' order by ordinal_position;"
193
+ conn = PG.connect(@db_conf)
194
+ begin
195
+ columns = nil
196
+ conn.exec(fetch_columns_sql) do |result|
197
+ columns = result.collect{|row| row['column_name']}
198
+ end
199
+ columns
200
+ ensure
201
+ conn.close rescue nil
202
+ end
203
+ end
204
+
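+ # Map one JSON record onto the table's column order; missing columns become empty fields, nested hashes/arrays are re-serialized as JSON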
205
+ def json_to_table_text(redshift_table_columns, json_text, delimiter)
206
+ return "" if json_text.nil? or json_text.empty?
207
+
208
+ # parse json text
209
+ json_obj = nil
210
+ begin
211
+ json_obj = JSON.parse(json_text)
212
+ rescue => e
213
+ $log.warn format_log("failed to parse json. "), :error=>e.to_s
214
+ return ""
215
+ end
216
+ return "" unless json_obj
217
+
218
+ # extract values from json
219
+ val_list = redshift_table_columns.collect do |cn|
220
+ val = json_obj[cn]
221
+ val = nil unless val and not val.to_s.empty?
222
+ val = JSON.generate(val) if val.kind_of?(Hash) or val.kind_of?(Array)
223
+ val.to_s unless val.nil?
224
+ end
225
+ if val_list.all?{|v| v.nil? or v.empty?}
226
+ $log.warn format_log("no data match for table columns on redshift. json_text=#{json_text} table_columns=#{redshift_table_columns}")
227
+ return ""
228
+ end
229
+
230
+ generate_line_with_delimiter(val_list, delimiter)
231
+ end
232
+
233
+ def generate_line_with_delimiter(val_list, delimiter)
234
+ val_list = val_list.collect do |val|
235
+ if val.nil? or val.empty?
236
+ ""
237
+ else
238
+ val.gsub(/\\/, "\\\\\\").gsub(/\t/, "\\\t").gsub(/\n/, "\\\n") # escape tab, newline and backslash
239
+ end
240
+ end
241
+ val_list.join(delimiter) + "\n"
242
+ end
243
+
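+ # Build the S3 object key from timestamp_key_format, appending _00, _01, ... until an unused key is found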
244
+ def create_s3path(bucket, path)
245
+ timestamp_key = (@utc) ? Time.now.utc.strftime(@timestamp_key_format) : Time.now.strftime(@timestamp_key_format)
246
+ i = 0
247
+ begin
248
+ suffix = "_#{'%02d' % i}"
249
+ s3path = "#{path}#{timestamp_key}#{suffix}.gz"
250
+ i += 1
251
+ end while bucket.objects[s3path].exists?
252
+ s3path
253
+ end
254
+
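+ # Create a table named after the tag, with one varchar(varchar_length) column per record key, unless a table with that name already exists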
255
+ def make_table_from_tag_name(tag, columns_arr)
256
+
257
+ conn = PG.connect(@db_conf)
258
+ sql = "SELECT table_name FROM INFORMATION_SCHEMA.TABLES WHERE table_name LIKE '#{tag}';"
259
+
260
+ cnt = 0
261
+ conn.exec(sql).each do |r|
262
+ cnt = cnt + 1
263
+ end
264
+
265
+ if cnt >= 1
266
+ return
267
+ end
268
+
269
+ cols = ""
270
+ for col_name in columns_arr do
271
+ cols = cols + "\"#{col_name}\" varchar(#{varchar_length}),"
272
+ end
273
+
274
+ len = cols.length
275
+ cols.slice!(len - 1)
276
+
277
+ if @redshift_schemaname
278
+ table_name = "#{@redshift_schemaname}.#{tag}"
279
+ else
280
+ table_name = "#{tag}"
281
+ end
282
+
283
+ sql = "CREATE TABLE #{table_name} (#{cols});"
284
+ begin
+ conn.exec(sql)
+ $log.info format_log("TABLE CREATED: => #{sql}")
+ rescue PGError => e
+ $log.error format_log("failed CREATE TABLE table_name: #{table_name}")
+ $log.error format_log("class: #{e.class} msg: #{e.message}")
+ rescue => e
+ $log.error format_log("failed CREATE TABLE table_name: #{table_name}")
+ $log.error format_log("class: #{e.class} msg: #{e.message}")
+ ensure
+ conn.close rescue nil
+ end
295
+ end
296
+
297
+
298
+ end
299
+
300
+
301
+ end
data/test/plugin/test_out_redshift_auto.rb ADDED
@@ -0,0 +1,526 @@
1
+ require 'test_helper'
2
+
3
+ require 'fluent/test'
4
+ require 'fluent/plugin/out_redshift_auto'
5
+ require 'flexmock/test_unit'
6
+ require 'zlib'
7
+
8
+
9
+ class RedshiftOutputTest < Test::Unit::TestCase
10
+ def setup
11
+ require 'aws-sdk'
12
+ require 'pg'
13
+ require 'csv'
14
+ Fluent::Test.setup
15
+ end
16
+
17
+ CONFIG_BASE= %[
18
+ aws_key_id test_key_id
19
+ aws_sec_key test_sec_key
20
+ s3_bucket test_bucket
21
+ path log
22
+ redshift_host test_host
23
+ redshift_dbname test_db
24
+ redshift_user test_user
25
+ redshift_password test_password
26
+ redshift_tablename test_table
27
+ buffer_type memory
28
+ utc
29
+ log_suffix id:5 host:localhost
30
+ ]
31
+ CONFIG_CSV= %[
32
+ #{CONFIG_BASE}
33
+ file_type csv
34
+ ]
35
+ CONFIG_TSV= %[
36
+ #{CONFIG_BASE}
37
+ file_type tsv
38
+ ]
39
+ CONFIG_JSON = %[
40
+ #{CONFIG_BASE}
41
+ file_type json
42
+ ]
43
+ CONFIG_JSON_WITH_SCHEMA = %[
44
+ #{CONFIG_BASE}
45
+ redshift_schemaname test_schema
46
+ file_type json
47
+ ]
48
+ CONFIG_MSGPACK = %[
49
+ #{CONFIG_BASE}
50
+ file_type msgpack
51
+ ]
52
+ CONFIG_PIPE_DELIMITER= %[
53
+ #{CONFIG_BASE}
54
+ delimiter |
55
+ ]
56
+ CONFIG_PIPE_DELIMITER_WITH_NAME= %[
57
+ #{CONFIG_BASE}
58
+ file_type pipe
59
+ delimiter |
60
+ ]
61
+ CONFIG=CONFIG_CSV
62
+
63
+ RECORD_CSV_A = {"log" => %[val_a,val_b,val_c,val_d]}
64
+ RECORD_CSV_B = {"log" => %[val_e,val_f,val_g,val_h]}
65
+ RECORD_TSV_A = {"log" => %[val_a\tval_b\tval_c\tval_d]}
66
+ RECORD_TSV_B = {"log" => %[val_e\tval_f\tval_g\tval_h]}
67
+ RECORD_JSON_A = {"log" => %[{"key_a" : "val_a", "key_b" : "val_b"}]}
68
+ RECORD_JSON_B = {"log" => %[{"key_c" : "val_c", "key_d" : "val_d"}]}
69
+ RECORD_MSGPACK_A = {"key_a" => "val_a", "key_b" => "val_b"}
70
+ RECORD_MSGPACK_B = {"key_c" => "val_c", "key_d" => "val_d"}
71
+ DEFAULT_TIME = Time.parse("2013-03-06 12:15:02 UTC").to_i
72
+
73
+ def create_driver(conf = CONFIG, tag='test.input')
74
+ Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag).configure(conf)
75
+ end
76
+
77
+ def create_driver_no_write(conf = CONFIG, tag='test.input')
78
+ Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag) do
79
+ def write(chunk)
80
+ chunk.read
81
+ end
82
+ end.configure(conf)
83
+ end
84
+
85
+ def test_configure
86
+ assert_raise(Fluent::ConfigError) {
87
+ d = create_driver('')
88
+ }
89
+ assert_raise(Fluent::ConfigError) {
90
+ d = create_driver(CONFIG_BASE)
91
+ }
92
+ d = create_driver(CONFIG_CSV)
93
+ assert_equal "test_key_id", d.instance.aws_key_id
94
+ assert_equal "test_sec_key", d.instance.aws_sec_key
95
+ assert_equal "test_bucket", d.instance.s3_bucket
96
+ assert_equal "log/", d.instance.path
97
+ assert_equal "test_host", d.instance.redshift_host
98
+ assert_equal 5439, d.instance.redshift_port
99
+ assert_equal "test_db", d.instance.redshift_dbname
100
+ assert_equal "test_user", d.instance.redshift_user
101
+ assert_equal "test_password", d.instance.redshift_password
102
+ assert_equal "test_table", d.instance.redshift_tablename
103
+ assert_equal nil, d.instance.redshift_schemaname
104
+ assert_equal "FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS", d.instance.redshift_copy_base_options
105
+ assert_equal nil, d.instance.redshift_copy_options
106
+ assert_equal "csv", d.instance.file_type
107
+ assert_equal ",", d.instance.delimiter
108
+ assert_equal true, d.instance.utc
109
+ end
110
+ def test_configure_with_schemaname
111
+ d = create_driver(CONFIG_JSON_WITH_SCHEMA)
112
+ assert_equal "test_schema", d.instance.redshift_schemaname
113
+ end
114
+ def test_configure_localtime
115
+ d = create_driver(CONFIG_CSV.gsub(/ *utc */, ''))
116
+ assert_equal false, d.instance.utc
117
+ end
118
+ def test_configure_no_path
119
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, ''))
120
+ assert_equal "", d.instance.path
121
+ end
122
+ def test_configure_root_path
123
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /'))
124
+ assert_equal "", d.instance.path
125
+ end
126
+ def test_configure_path_with_slash
127
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path log/'))
128
+ assert_equal "log/", d.instance.path
129
+ end
130
+ def test_configure_path_starts_with_slash
131
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log/'))
132
+ assert_equal "log/", d.instance.path
133
+ end
134
+ def test_configure_path_starts_with_slash_without_last_slash
135
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log'))
136
+ assert_equal "log/", d.instance.path
137
+ end
138
+ def test_configure_tsv
139
+ d1 = create_driver(CONFIG_TSV)
140
+ assert_equal "tsv", d1.instance.file_type
141
+ assert_equal "\t", d1.instance.delimiter
142
+ end
143
+ def test_configure_json
144
+ d2 = create_driver(CONFIG_JSON)
145
+ assert_equal "json", d2.instance.file_type
146
+ assert_equal "\t", d2.instance.delimiter
147
+ end
148
+ def test_configure_msgpack
149
+ d2 = create_driver(CONFIG_MSGPACK)
150
+ assert_equal "msgpack", d2.instance.file_type
151
+ assert_equal "\t", d2.instance.delimiter
152
+ end
153
+ def test_configure_original_file_type
154
+ d3 = create_driver(CONFIG_PIPE_DELIMITER)
155
+ assert_equal nil, d3.instance.file_type
156
+ assert_equal "|", d3.instance.delimiter
157
+
158
+ d4 = create_driver(CONFIG_PIPE_DELIMITER_WITH_NAME)
159
+ assert_equal "pipe", d4.instance.file_type
160
+ assert_equal "|", d4.instance.delimiter
161
+ end
162
+ def test_configure_no_log_suffix
163
+ d = create_driver(CONFIG_CSV.gsub(/ *log_suffix *.+$/, ''))
164
+ assert_equal "", d.instance.log_suffix
165
+ end
166
+
167
+ def emit_csv(d)
168
+ d.emit(RECORD_CSV_A, DEFAULT_TIME)
169
+ d.emit(RECORD_CSV_B, DEFAULT_TIME)
170
+ end
171
+ def emit_tsv(d)
172
+ d.emit(RECORD_TSV_A, DEFAULT_TIME)
173
+ d.emit(RECORD_TSV_B, DEFAULT_TIME)
174
+ end
175
+ def emit_json(d)
176
+ d.emit(RECORD_JSON_A, DEFAULT_TIME)
177
+ d.emit(RECORD_JSON_B, DEFAULT_TIME)
178
+ end
179
+ def emit_msgpack(d)
180
+ d.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
181
+ d.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
182
+ end
183
+
184
+ def test_format_csv
185
+ d_csv = create_driver_no_write(CONFIG_CSV)
186
+ emit_csv(d_csv)
187
+ d_csv.expect_format RECORD_CSV_A['log'] + "\n"
188
+ d_csv.expect_format RECORD_CSV_B['log'] + "\n"
189
+ d_csv.run
190
+ end
191
+ def test_format_tsv
192
+ d_tsv = create_driver_no_write(CONFIG_TSV)
193
+ emit_tsv(d_tsv)
194
+ d_tsv.expect_format RECORD_TSV_A['log'] + "\n"
195
+ d_tsv.expect_format RECORD_TSV_B['log'] + "\n"
196
+ d_tsv.run
197
+ end
198
+ def test_format_json
199
+ d_json = create_driver_no_write(CONFIG_JSON)
200
+ emit_json(d_json)
201
+ d_json.expect_format RECORD_JSON_A.to_msgpack
202
+ d_json.expect_format RECORD_JSON_B.to_msgpack
203
+ d_json.run
204
+ end
205
+
206
+ def test_format_msgpack
207
+ d_msgpack = create_driver_no_write(CONFIG_MSGPACK)
208
+ emit_msgpack(d_msgpack)
209
+ d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_A }.to_msgpack)
210
+ d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_B }.to_msgpack)
211
+ d_msgpack.run
212
+ end
213
+
214
+ class PGConnectionMock
215
+ def initialize(options = {})
216
+ @return_keys = options[:return_keys] || ['key_a', 'key_b', 'key_c', 'key_d', 'key_e', 'key_f', 'key_g', 'key_h']
217
+ @target_schema = options[:schemaname] || nil
218
+ @target_table = options[:tablename] || 'test_table'
219
+ end
220
+
221
+ def expected_column_list_query
222
+ if @target_schema
223
+ /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_schema = '#{@target_schema}' and table_name = '#{@target_table}'/
224
+ else
225
+ /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_name = '#{@target_table}'/
226
+ end
227
+ end
228
+
229
+ def expected_copy_query
230
+ if @target_schema
231
+ /\Acopy #{@target_schema}.#{@target_table} from/
232
+ else
233
+ /\Acopy #{@target_table} from/
234
+ end
235
+ end
236
+
237
+ def exec(sql, &block)
238
+ if block_given?
239
+ if sql =~ expected_column_list_query
240
+ yield @return_keys.collect{|key| {'column_name' => key}}
241
+ else
242
+ yield []
243
+ end
244
+ else
245
+ unless sql =~ expected_copy_query
246
+ error = PG::Error.new("ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details.")
247
+ error.result = "ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details."
248
+ raise error
249
+ end
250
+ end
251
+ end
252
+
253
+ def close
254
+ end
255
+ end
256
+
257
+ def setup_pg_mock
258
+ # create mock of PG
259
+ def PG.connect(dbinfo)
260
+ return PGConnectionMock.new
261
+ end
262
+ end
263
+
264
+ def setup_s3_mock(expected_data)
265
+ current_time = Time.now
266
+
267
+ # create mock of s3 object
268
+ s3obj = flexmock(AWS::S3::S3Object)
269
+ s3obj.should_receive(:exists?).with_any_args.and_return { false }
270
+ s3obj.should_receive(:write).with(
271
+ # pathname
272
+ on { |pathname|
273
+ data = nil
274
+ pathname.open { |f|
275
+ gz = Zlib::GzipReader.new(f)
276
+ data = gz.read
277
+ gz.close
278
+ }
279
+ assert_equal expected_data, data
280
+ },
281
+ :acl => :bucket_owner_full_control
282
+ ).and_return { true }
283
+
284
+ # create mock of s3 object collection
285
+ s3obj_col = flexmock(AWS::S3::ObjectCollection)
286
+ s3obj_col.should_receive(:[]).with(
287
+ on { |key|
288
+ expected_key = current_time.utc.strftime("log/year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M_00.gz")
289
+ key == expected_key
290
+ }).
291
+ and_return {
292
+ s3obj
293
+ }
294
+
295
+ # create mock of s3 bucket
296
+ flexmock(AWS::S3::Bucket).new_instances do |bucket|
297
+ bucket.should_receive(:objects).with_any_args.
298
+ and_return {
299
+ s3obj_col
300
+ }
301
+ end
302
+ end
303
+
304
+ def setup_tempfile_mock_to_be_closed
305
+ flexmock(Tempfile).new_instances.should_receive(:close!).at_least.once
306
+ end
307
+
308
+ def setup_mocks(expected_data)
309
+ setup_pg_mock
310
+ setup_s3_mock(expected_data)
+ end
311
+
312
+ def test_write_with_csv
313
+ setup_mocks(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
314
+ setup_tempfile_mock_to_be_closed
315
+ d_csv = create_driver
316
+ emit_csv(d_csv)
317
+ assert_equal true, d_csv.run
318
+ end
319
+
320
+ def test_write_with_json
321
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
322
+ setup_tempfile_mock_to_be_closed
323
+ d_json = create_driver(CONFIG_JSON)
324
+ emit_json(d_json)
325
+ assert_equal true, d_json.run
326
+ end
327
+
328
+ def test_write_with_json_hash_value
329
+ setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
330
+ d_json = create_driver(CONFIG_JSON)
331
+ d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : {"foo" : "var"}}]} , DEFAULT_TIME)
332
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
333
+ assert_equal true, d_json.run
334
+ end
335
+
336
+ def test_write_with_json_array_value
337
+ setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
338
+ d_json = create_driver(CONFIG_JSON)
339
+ d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : ["foo", "var"]}]} , DEFAULT_TIME)
340
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
341
+ assert_equal true, d_json.run
342
+ end
343
+
344
+ def test_write_with_json_including_tab_newline_quote
345
+ setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
346
+ d_json = create_driver(CONFIG_JSON)
347
+ d_json.emit({"log" => %[{"key_a" : "val_a_with_\\t_tab_\\n_newline", "key_b" : "val_b_with_\\\\_quote"}]} , DEFAULT_TIME)
348
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
349
+ assert_equal true, d_json.run
350
+ end
351
+
352
+ def test_write_with_json_no_data
353
+ setup_mocks("")
354
+ d_json = create_driver(CONFIG_JSON)
355
+ d_json.emit("", DEFAULT_TIME)
356
+ d_json.emit("", DEFAULT_TIME)
357
+ assert_equal false, d_json.run
358
+ end
359
+
360
+ def test_write_with_json_invalid_one_line
361
+ setup_mocks(%[\t\tval_c\tval_d\t\t\t\t\n])
362
+ d_json = create_driver(CONFIG_JSON)
363
+ d_json.emit({"log" => %[}}]}, DEFAULT_TIME)
364
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
365
+ assert_equal true, d_json.run
366
+ end
367
+
368
+ def test_write_with_json_no_available_data
369
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
370
+ d_json = create_driver(CONFIG_JSON)
371
+ d_json.emit(RECORD_JSON_A, DEFAULT_TIME)
372
+ d_json.emit({"log" => %[{"key_o" : "val_o", "key_p" : "val_p"}]}, DEFAULT_TIME)
373
+ assert_equal true, d_json.run
374
+ end
375
+
376
+ def test_write_with_msgpack
377
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
378
+ d_msgpack = create_driver(CONFIG_MSGPACK)
379
+ emit_msgpack(d_msgpack)
380
+ assert_equal true, d_msgpack.run
381
+ end
382
+
383
+ def test_write_with_msgpack_hash_value
384
+ setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
385
+ d_msgpack = create_driver(CONFIG_MSGPACK)
386
+ d_msgpack.emit({"key_a" => "val_a", "key_b" => {"foo" => "var"}} , DEFAULT_TIME)
387
+ d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
388
+ assert_equal true, d_msgpack.run
389
+ end
390
+
391
+ def test_write_with_msgpack_array_value
392
+ setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
393
+ d_msgpack = create_driver(CONFIG_MSGPACK)
394
+ d_msgpack.emit({"key_a" => "val_a", "key_b" => ["foo", "var"]} , DEFAULT_TIME)
395
+ d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
396
+ assert_equal true, d_msgpack.run
397
+ end
398
+
399
+ def test_write_with_msgpack_including_tab_newline_quote
400
+ setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
401
+ d_msgpack = create_driver(CONFIG_MSGPACK)
402
+ d_msgpack.emit({"key_a" => "val_a_with_\t_tab_\n_newline", "key_b" => "val_b_with_\\_quote"} , DEFAULT_TIME)
403
+ d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
404
+ assert_equal true, d_msgpack.run
405
+ end
406
+
407
+ def test_write_with_msgpack_no_data
408
+ setup_mocks("")
409
+ d_msgpack = create_driver(CONFIG_MSGPACK)
410
+ d_msgpack.emit({}, DEFAULT_TIME)
411
+ d_msgpack.emit({}, DEFAULT_TIME)
412
+ assert_equal false, d_msgpack.run
413
+ end
414
+
415
+ def test_write_with_msgpack_no_available_data
416
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
417
+ d_msgpack = create_driver(CONFIG_MSGPACK)
418
+ d_msgpack.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
419
+ d_msgpack.emit({"key_o" => "val_o", "key_p" => "val_p"}, DEFAULT_TIME)
420
+ assert_equal true, d_msgpack.run
421
+ end
422
+
423
+ def test_write_redshift_connection_error
424
+ def PG.connect(dbinfo)
425
+ return Class.new do
426
+ def initialize(return_keys=[]); end
427
+ def exec(sql)
428
+ raise PG::Error, "redshift connection error"
429
+ end
430
+ def close; end
431
+ end.new
432
+ end
433
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
434
+
435
+ d_csv = create_driver
436
+ emit_csv(d_csv)
437
+ assert_raise(PG::Error) {
438
+ d_csv.run
439
+ }
440
+ end
441
+
442
+ def test_write_redshift_load_error
443
+ PG::Error.module_eval { attr_accessor :result}
444
+ def PG.connect(dbinfo)
445
+ return Class.new do
446
+ def initialize(return_keys=[]); end
447
+ def exec(sql)
448
+ error = PG::Error.new("ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details.")
449
+ error.result = "ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details."
450
+ raise error
451
+ end
452
+ def close; end
453
+ end.new
454
+ end
455
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
456
+
457
+ d_csv = create_driver
458
+ emit_csv(d_csv)
459
+ assert_equal false, d_csv.run
460
+ end
461
+
462
+ def test_write_with_json_redshift_connection_error
463
+ def PG.connect(dbinfo)
464
+ return Class.new do
465
+ def initialize(return_keys=[]); end
466
+ def exec(sql, &block)
467
+ error = PG::Error.new("redshift connection error")
468
+ raise error
469
+ end
470
+ def close; end
471
+ end.new
472
+ end
473
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
474
+
475
+ d_json = create_driver(CONFIG_JSON)
476
+ emit_json(d_json)
477
+ assert_raise(PG::Error) {
478
+ d_json.run
479
+ }
480
+ end
481
+
482
+ def test_write_with_json_no_table_on_redshift
483
+ def PG.connect(dbinfo)
484
+ return Class.new do
485
+ def initialize(return_keys=[]); end
486
+ def exec(sql, &block)
487
+ yield [] if block_given?
488
+ end
489
+ def close; end
490
+ end.new
491
+ end
492
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
493
+
494
+ d_json = create_driver(CONFIG_JSON)
495
+ emit_json(d_json)
496
+ assert_equal false, d_json.run
497
+ end
498
+
499
+ def test_write_with_json_failed_to_get_columns
500
+ def PG.connect(dbinfo)
501
+ return Class.new do
502
+ def initialize(return_keys=[]); end
503
+ def exec(sql, &block)
504
+ end
505
+ def close; end
506
+ end.new
507
+ end
508
+ setup_s3_mock("")
509
+
510
+ d_json = create_driver(CONFIG_JSON)
511
+ emit_json(d_json)
512
+ assert_raise(RuntimeError, "failed to fetch the redshift table definition.") {
513
+ d_json.run
514
+ }
515
+ end
516
+
517
+ def test_write_with_json_fetch_column_with_schema
518
+ def PG.connect(dbinfo)
519
+ return PGConnectionMock.new(:schemaname => 'test_schema')
520
+ end
521
+ setup_s3_mock(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
522
+ d_json = create_driver(CONFIG_JSON_WITH_SCHEMA)
523
+ emit_json(d_json)
524
+ assert_equal true, d_json.run
525
+ end
526
+ end
data/test/test_helper.rb ADDED
@@ -0,0 +1,8 @@
1
+ if ENV['COVERAGE']
2
+ require 'simplecov'
3
+ SimpleCov.start do
4
+ add_filter 'test/'
5
+ add_filter 'pkg/'
6
+ add_filter 'vendor/'
7
+ end
8
+ end
metadata ADDED
@@ -0,0 +1,153 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: fluent-plugin-redshift-auto
3
+ version: !ruby/object:Gem::Version
4
+ prerelease: false
5
+ segments:
6
+ - 0
7
+ - 0
8
+ - 2
9
+ version: 0.0.2
10
+ platform: ruby
11
+ authors:
12
+ - Takashi Honda
13
+ autorequire:
14
+ bindir: bin
15
+ cert_chain: []
16
+
17
+ date: 2013-10-18 00:00:00 +09:00
18
+ default_executable:
19
+ dependencies:
20
+ - !ruby/object:Gem::Dependency
21
+ name: fluentd
22
+ prerelease: false
23
+ requirement: &id001 !ruby/object:Gem::Requirement
24
+ requirements:
25
+ - - ~>
26
+ - !ruby/object:Gem::Version
27
+ segments:
28
+ - 0
29
+ - 10
30
+ - 0
31
+ version: 0.10.0
32
+ type: :runtime
33
+ version_requirements: *id001
34
+ - !ruby/object:Gem::Dependency
35
+ name: aws-sdk
36
+ prerelease: false
37
+ requirement: &id002 !ruby/object:Gem::Requirement
38
+ requirements:
39
+ - - ">="
40
+ - !ruby/object:Gem::Version
41
+ segments:
42
+ - 1
43
+ - 6
44
+ - 3
45
+ version: 1.6.3
46
+ type: :runtime
47
+ version_requirements: *id002
48
+ - !ruby/object:Gem::Dependency
49
+ name: pg
50
+ prerelease: false
51
+ requirement: &id003 !ruby/object:Gem::Requirement
52
+ requirements:
53
+ - - ~>
54
+ - !ruby/object:Gem::Version
55
+ segments:
56
+ - 0
57
+ - 14
58
+ - 0
59
+ version: 0.14.0
60
+ type: :runtime
61
+ version_requirements: *id003
62
+ - !ruby/object:Gem::Dependency
63
+ name: rake
64
+ prerelease: false
65
+ requirement: &id004 !ruby/object:Gem::Requirement
66
+ requirements:
67
+ - - ">="
68
+ - !ruby/object:Gem::Version
69
+ segments:
70
+ - 0
71
+ version: "0"
72
+ type: :development
73
+ version_requirements: *id004
74
+ - !ruby/object:Gem::Dependency
75
+ name: simplecov
76
+ prerelease: false
77
+ requirement: &id005 !ruby/object:Gem::Requirement
78
+ requirements:
79
+ - - ">="
80
+ - !ruby/object:Gem::Version
81
+ segments:
82
+ - 0
83
+ - 5
84
+ - 4
85
+ version: 0.5.4
86
+ type: :development
87
+ version_requirements: *id005
88
+ - !ruby/object:Gem::Dependency
89
+ name: flexmock
90
+ prerelease: false
91
+ requirement: &id006 !ruby/object:Gem::Requirement
92
+ requirements:
93
+ - - ">="
94
+ - !ruby/object:Gem::Version
95
+ segments:
96
+ - 1
97
+ - 3
98
+ - 1
99
+ version: 1.3.1
100
+ type: :development
101
+ version_requirements: *id006
102
+ description: Amazon Redshift output plugin for Fluentd with creating table
103
+ email:
104
+ - takashi.0628.honda@gmail.com
105
+ executables: []
106
+
107
+ extensions: []
108
+
109
+ extra_rdoc_files: []
110
+
111
+ files:
112
+ - .gitignore
113
+ - Gemfile
114
+ - README.md
115
+ - Rakefile
116
+ - VERSION
117
+ - fluent-plugin-redshift-auto.gemspec
118
+ - lib/fluent/plugin/out_redshift_auto.rb
119
+ - test/plugin/test_out_redshift_auto.rb
120
+ - test/test_helper.rb
121
+ has_rdoc: true
122
+ homepage: https://github.com/takashi-honda/fluent-plugin-redshift-auto
123
+ licenses: []
124
+
125
+ post_install_message:
126
+ rdoc_options: []
127
+
128
+ require_paths:
129
+ - lib
130
+ required_ruby_version: !ruby/object:Gem::Requirement
131
+ requirements:
132
+ - - ">="
133
+ - !ruby/object:Gem::Version
134
+ segments:
135
+ - 0
136
+ version: "0"
137
+ required_rubygems_version: !ruby/object:Gem::Requirement
138
+ requirements:
139
+ - - ">="
140
+ - !ruby/object:Gem::Version
141
+ segments:
142
+ - 0
143
+ version: "0"
144
+ requirements: []
145
+
146
+ rubyforge_project:
147
+ rubygems_version: 1.3.6
148
+ signing_key:
149
+ specification_version: 3
150
+ summary: Amazon Redshift output plugin for Fluentd with creating table
151
+ test_files:
152
+ - test/plugin/test_out_redshift_auto.rb
153
+ - test/test_helper.rb