fluent-plugin-redshift-anton 1.0.1

checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: fba5c12a592c44c13ab3faadd0842a13fd2f635d
4
+ data.tar.gz: 55a73f89f4174e1a58aa07708d05aa9a6184df17
5
+ SHA512:
6
+ metadata.gz: 263d0db356aabc5c02cfb08497a3da6bbe1d2cc1fcc38c1f03ceec0a7ceda31e2ef2e14be9b2c3af71dabeb68e185191c4c8907c5ebe97973e34f9144b58ca60
7
+ data.tar.gz: 825d2401ddbed069b12916698c911ec18028b60f2a6e258240377b6e3e11cfe14b7a4bfb6cc9985bd494bea13f932de939253fcc045c47dc481fa6efea3b00dd
data/Gemfile ADDED
@@ -0,0 +1,3 @@
1
+ source 'https://rubygems.org'
2
+
3
+ gemspec
data/README.md ADDED
@@ -0,0 +1,162 @@
1
+ Amazon Redshift output plugin for Fluentd
2
+ ========
3
+
4
+ ## Overview
5
+
6
+ Amazon Redshift output plugin uploads event logs to an Amazon Redshift cluster. Supported data formats are csv, tsv, json and msgpack. An S3 bucket and a Redshift cluster are required to use this plugin.
7
+
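+ Internally, each buffer chunk is gzipped, uploaded to the configured S3 bucket under `path` plus `timestamp_key_format`, and then loaded into Redshift with a COPY statement built from the configured credentials and delimiter. A rough sketch of that statement (bucket, key and table names here are illustrative; the delimiter depends on `file_type`):
+
+ <pre>
+ copy public.apache_log from 's3://your-bucket/logs/year=2013/month=03/day=05/hour=12/20130305-1215_00.gz'
+ CREDENTIALS 'aws_access_key_id=YOUR_AWS_KEY_ID;aws_secret_access_key=YOUR_AWS_SECRET_KEY'
+ delimiter '\t' GZIP TRUNCATECOLUMNS ESCAPE FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS;
+ </pre>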
8
+ ## Installation
9
+
10
+ gem install fluent-plugin-redshift-anton
11
+
12
+ ## Configuration
13
+
14
+ Format:
15
+
16
+ <match my.tag>
17
+ type redshift_anton
18
+
19
+ # s3 (for copying data to redshift)
20
+ aws_key_id YOUR_AWS_KEY_ID
21
+ aws_sec_key YOUR_AWS_SECRET_KEY
22
+ s3_bucket YOUR_S3_BUCKET
23
+ s3_endpoint YOUR_S3_BUCKET_END_POINT
24
+ path YOUR_S3_PATH
25
+ timestamp_key_format year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M
26
+
27
+ # redshift
28
+ redshift_host YOUR_AMAZON_REDSHIFT_CLUSTER_END_POINT
29
+ redshift_port YOUR_AMAZON_REDSHIFT_CLUSTER_PORT
30
+ redshift_dbname YOUR_AMAZON_REDSHIFT_CLUSTER_DATABASE_NAME
31
+ redshift_user YOUR_AMAZON_REDSHIFT_CLUSTER_USER_NAME
32
+ redshift_password YOUR_AMAZON_REDSHIFT_CLUSTER_PASSWORD
33
+ redshift_schemaname YOUR_AMAZON_REDSHIFT_CLUSTER_TARGET_SCHEMA_NAME
34
+ redshift_tablename YOUR_AMAZON_REDSHIFT_CLUSTER_TARGET_TABLE_NAME
35
+ make_auto_table 1 # 1 => create the table automatically, 0 => do not
36
+ tag_table 1 # 1 => use the tag name as the table name, 0 => use redshift_tablename
37
+ file_type [tsv|csv|json|msgpack]
38
+ varchar_length ALL_COLUMNS_VARCHAR_LENGTH
39
+
40
+
41
+ # buffer
42
+ buffer_type file
43
+ buffer_path /var/log/fluent/redshift
44
+ flush_interval 15m
45
+ buffer_chunk_limit 1g
46
+ </match>
47
+
48
+ Example (watch and upload json formatted apache log):
49
+
50
+ <source>
51
+ type tail
52
+ path redshift_test.json
53
+ pos_file redshift_test_json.pos
54
+ tag redshift.json
55
+ format /^(?<log>.*)$/
56
+ </source>
57
+
58
+ <match redshift.json>
59
+ type redshift_anton
60
+
61
+ # s3 (for copying data to redshift)
62
+ aws_key_id YOUR_AWS_KEY_ID
63
+ aws_sec_key YOUR_AWS_SECRET_KEY
64
+ s3_bucket hapyrus-example
65
+ s3_endpoint s3.amazonaws.com
66
+ path path/on/s3/apache_json_log/
67
+ timestamp_key_format year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M
68
+
69
+ # redshift
70
+ redshift_host xxx-yyy-zzz.xxxxxxxxxx.us-east-1.redshift.amazonaws.com
71
+ redshift_port 5439
72
+ redshift_dbname fluent-redshift-test
73
+ redshift_user fluent
74
+ redshift_password fluent-password
75
+ redshift_tablename apache_log
76
+ file_type json
77
+
78
+ # buffer
79
+ buffer_type file
80
+ buffer_path /var/log/fluent/redshift
81
+ flush_interval 15m
82
+ buffer_chunk_limit 1g
83
+ </match>
84
+
85
+ + `type` (required) : The value must be `redshift_anton`.
86
+
87
+ + `aws_key_id` (required) : AWS access key id to access s3 bucket.
88
+
89
+ + `aws_sec_key` (required) : AWS secret access key to access the s3 bucket.
90
+
91
+ + `s3_bucket` (required) : s3 bucket name. The bucket must be in the same region as your Redshift cluster.
92
+
93
+ + `s3_endpoint` : s3 endpoint.
94
+
95
+ + `path` (required) : s3 key prefix for the uploaded log files.
96
+
97
+ + `timestamp_key_format` : The format of the object keys. It can include date-format directives.
98
+
99
+ - Default parameter is "year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M"
100
+ - For example, with the example configuration above the s3 paths look as follows.
101
+ <pre>
102
+ hapyrus-example/path/on/s3/apache_json_log/year=2013/month=03/day=05/hour=12/20130305-1215_00.gz
103
+ hapyrus-example/path/on/s3/apache_json_log/year=2013/month=03/day=05/hour=12/20130305-1230_00.gz
104
+ </pre>
105
+
106
+ + `redshift_host` (required) : the endpoint (or hostname) of your Amazon Redshift cluster.
107
+
108
+ + `redshift_port` : port number (default: 5439).
109
+
110
+ + `redshift_dbname` (required) : database name.
111
+
112
+ + `redshift_user` (required) : user name.
113
+
114
+ + `redshift_password` (required) : password for the user name.
115
+
116
+ + `redshift_tablename` (required) : table name to store data.
117
+
118
+ + `redshift_schemaname` : schema name of the target table. If not set, the table is looked up without a schema qualifier, following your search_path.
119
+
120
+ + `make_auto_table` (optional, integer) : whether to create tables automatically. Set 1 to create the target table automatically; set 0 to disable.
121
+
122
+ + `tag_table` (optional, integer) : whether to use the tag name as the table name. Set 1 to derive the table name from the tag; set 0 to use `redshift_tablename`.
123
+
124
+ + `file_type` : file format of the source data. `csv`, `tsv`, `msgpack` or `json` are available.
125
+
126
+ + `delimiter` : delimiter of the source data. If not set, it is derived from `file_type`.
127
+
128
+ + `buffer_type` : buffer type.
129
+
130
+ + `buffer_path` : path prefix of the files to buffer logs.
131
+
132
+ + `flush_interval` : flush interval.
133
+
134
+ + `buffer_chunk_limit` : maximum size of each buffer chunk.
135
+
136
+ + `utc` : use UTC when expanding `timestamp_key_format`.
137
+
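+ When `make_auto_table` is 1 and `file_type` is `json`, the table is created from the keys of the incoming record, with every column declared as `varchar(varchar_length)` (255 by default). As an illustrative sketch, a record `{"user_id": 12345, "path": "/index"}` emitted with tag `access_log` results in roughly:
+
+ <pre>
+ CREATE TABLE access_log ("user_id" varchar(255),"path" varchar(255));
+ </pre>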
138
+ ## Logging examples
139
+ ```ruby
140
+ # examples by fluent-logger
141
+ require 'fluent-logger'
142
+ log = Fluent::Logger::FluentLogger.new(nil, :host => 'localhost', :port => 24224)
143
+
144
+ # file_type: csv
145
+ log.post('your.tag', :log => "12345,12345")
146
+
147
+ # file_type: tsv
148
+ log.post('your.tag', :log => "12345\t12345")
149
+
150
+ # file_type: json
151
+ require 'json'
152
+ log.post('your.tag', :log => { :user_id => 12345, :data_id => 12345 }.to_json)
153
+
154
+ # file_type: msgpack
155
+ log.post('your.tag', :user_id => 12345, :data_id => 12345)
156
+ ```
157
+
158
+ ## License
159
+
160
+ Copyright (c) 2013 [Hapyrus Inc](http://hapyrus.com)
161
+
162
+ [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0)
data/Rakefile ADDED
@@ -0,0 +1,16 @@
1
+ require "bundler"
2
+ Bundler::GemHelper.install_tasks
3
+ require 'rake/testtask'
4
+
5
+ Rake::TestTask.new(:test) do |test|
6
+ test.libs << 'lib' << 'test'
7
+ test.test_files = FileList['test/plugin/*.rb']
8
+ test.verbose = true
9
+ end
10
+
11
+ task :coverage do |t|
12
+ ENV['COVERAGE'] = '1'
13
+ Rake::Task["test"].invoke
14
+ end
15
+
16
+ task :default => [:build]
data/VERSION ADDED
@@ -0,0 +1 @@
1
+ 1.0.1
data/fluent-plugin-redshift-anton.gemspec ADDED
@@ -0,0 +1,25 @@
1
+ # -*- encoding: utf-8 -*-
2
+ $:.push File.expand_path('../lib', __FILE__)
3
+
4
+ Gem::Specification.new do |gem|
5
+ gem.name = "fluent-plugin-redshift-anton"
6
+ gem.version = File.read("VERSION").strip
7
+ gem.authors = ["Anton Kuchinsky"]
8
+ gem.email = ["akuchinsky@gmail.com"]
9
+ gem.description = %q{Amazon Redshift output plugin for Fluentd with creating table}
10
+ gem.summary = gem.description
11
+ gem.homepage = "https://github.com/akuchins/fluent-plugin-redshift-anton"
12
+ gem.has_rdoc = false
13
+
14
+ gem.files = `git ls-files`.split($/)
15
+ gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
16
+ gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
17
+ gem.require_paths = ["lib"]
18
+
19
+ gem.add_dependency "fluentd", ">= 0.10.0"
20
+ gem.add_dependency "aws-sdk", ">= 1.6.3"
21
+ gem.add_dependency "pg", ">= 0.14.0"
22
+ gem.add_development_dependency "rake"
23
+ gem.add_development_dependency "simplecov", ">= 0.5.4"
24
+ gem.add_development_dependency "flexmock", ">= 1.3.1"
25
+ end
data/lib/fluent/plugin/out_redshift_auto.rb ADDED
@@ -0,0 +1,330 @@
1
+ module Fluent
2
+
3
+
4
+ class RedshiftOutput < BufferedOutput
5
+ Fluent::Plugin.register_output('redshift_anton', self)
6
+
7
+ # ignore load table error. (invalid data format)
8
+ IGNORE_REDSHIFT_ERROR_REGEXP = /^ERROR: Load into table '[^']+' failed\./
9
+
10
+ def initialize
11
+ super
12
+ require 'aws-sdk'
13
+ require 'zlib'
14
+ require 'time'
15
+ require 'tempfile'
16
+ require 'pg'
17
+ require 'json'
18
+ require 'csv'
19
+ end
20
+
21
+ config_param :record_log_tag, :string, :default => 'log'
22
+ # s3
23
+ config_param :aws_key_id, :string
24
+ config_param :aws_sec_key, :string
25
+ config_param :s3_bucket, :string
26
+ config_param :s3_endpoint, :string, :default => nil
27
+ config_param :path, :string, :default => ""
28
+ config_param :timestamp_key_format, :string, :default => 'year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M'
29
+ config_param :utc, :bool, :default => false
30
+ # redshift
31
+ config_param :redshift_host, :string
32
+ config_param :redshift_port, :integer, :default => 5439
33
+ config_param :redshift_dbname, :string
34
+ config_param :redshift_user, :string
35
+ config_param :redshift_password, :string
36
+ config_param :redshift_tablename, :string
37
+ config_param :redshift_schemaname, :string, :default => "public"
38
+ config_param :redshift_copy_base_options, :string , :default => "FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS"
39
+ config_param :make_auto_table, :integer, :default => 1 #1 => make_auto 0=> no
40
+ config_param :tag_table, :integer, :default => 1 #1 => tag_name = table_name, 0 => no
41
+ # file format
42
+ config_param :file_type, :string, :default => nil # json, tsv, csv
43
+ config_param :delimiter, :string, :default => nil
44
+ # for debug
45
+ config_param :log_suffix, :string, :default => ''
46
+ # for varchar length
47
+ config_param :varchar_length, :integer, :default => 255
48
+
49
+ def configure(conf)
50
+ super
51
+ @path = "#{@path}/" if /.+[^\/]$/ =~ @path
52
+ @path = "" if @path == "/"
53
+ @utc = true if conf['utc']
54
+ @db_conf = {
55
+ host:@redshift_host,
56
+ port:@redshift_port,
57
+ dbname:@redshift_dbname,
58
+ user:@redshift_user,
59
+ password:@redshift_password
60
+ }
61
+ @delimiter = determine_delimiter(@file_type) if @delimiter.nil? or @delimiter.empty?
62
+ $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
63
+ @copy_sql_template = "copy #{@redshift_schemaname}.%s from '%s' CREDENTIALS 'aws_access_key_id=#{@aws_key_id};aws_secret_access_key=%s' delimiter '#{@delimiter}' GZIP TRUNCATECOLUMNS ESCAPE #{@redshift_copy_base_options};"
64
+ end
65
+
66
+ def start
67
+ super
68
+ # init s3 conf
69
+ $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
70
+ options = {
71
+ :access_key_id => @aws_key_id,
72
+ :secret_access_key => @aws_sec_key
73
+ }
74
+ options[:s3_endpoint] = @s3_endpoint if @s3_endpoint
75
+ @s3 = AWS::S3.new(options)
76
+ @bucket = @s3.buckets[@s3_bucket]
77
+ end
78
+
79
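+ # Serializes each record to JSON; when make_auto_table is 1 and file_type is json,
+ # the target table is first created from the record's keys. Emits the JSON text as
+ # msgpack for file_type json, otherwise the record_log_tag field as a raw line.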
+ def format(tag, time, record)
80
+ json_text = JSON.generate(record)
81
+ if @make_auto_table == 1 && json?
82
+ json = JSON.parse(json_text)
83
+ cols = []
84
+ json.each do |key,val|
85
+ cols.push("#{key}")
86
+ end
87
+ make_table_from_tag_name(tag, cols)
88
+ end
89
+
90
+ (json?) ? json_text.to_msgpack : "#{record[@record_log_tag]}\n"
91
+ end
92
+
93
+ def write(chunk)
94
+ $log.debug format_log("start creating gz.")
95
+ if @tag_table == 1 then
96
+ file_name = File::basename(chunk.path)
97
+ table_name = file_name.sub(/\..*/, "")
98
+ else
99
+ table_name = @redshift_tablename
100
+ end
101
+
102
+ # create a gz file
103
+ tmp = Tempfile.new("s3-")
104
+ tmp = (json?) ? create_gz_file_from_json(tmp, chunk, @delimiter)
105
+ : create_gz_file_from_msgpack(tmp, chunk)
106
+
107
+ # no data -> skip
108
+ unless tmp
109
+ $log.debug format_log("received no valid data. ")
110
+ return false # for debug
111
+ end
112
+
113
+ # create a file path with time format
114
+ s3path = create_s3path(@bucket, @path)
115
+
116
+ # upload gz to s3
117
+ @bucket.objects[s3path].write(Pathname.new(tmp.path),
118
+ :acl => :bucket_owner_full_control)
119
+ # copy gz on s3 to redshift
120
+ s3_uri = "s3://#{@s3_bucket}/#{s3path}"
121
+ sql = @copy_sql_template % [table_name, s3_uri, @aws_sec_key]
122
+ $log.debug format_log("start copying. s3_uri=#{s3_uri}")
123
+ conn = nil
124
+ begin
125
+ conn = PG.connect(@db_conf)
126
+ conn.exec(sql)
127
+ $log.info format_log("completed copying to redshift. s3_uri=#{s3_uri}")
128
+ rescue PG::Error => e
129
+ $log.error format_log("failed to copy data into redshift. s3_uri=#{s3_uri}"), :error=>e.to_s
130
+ raise e unless e.to_s =~ IGNORE_REDSHIFT_ERROR_REGEXP
131
+ return false # for debug
132
+ ensure
133
+ conn.close rescue nil if conn
134
+ end
135
+ true # for debug
136
+ end
137
+
138
+ protected
139
+ def format_log(message)
140
+ (@log_suffix and not @log_suffix.empty?) ? "#{message} #{@log_suffix}" : message
141
+ end
142
+
143
+ private
144
+ def json?
145
+ @file_type == 'json'
146
+ end
147
+
148
+ def create_gz_file_from_msgpack(dst_file, chunk)
149
+ gzw = nil
150
+ begin
151
+ gzw = Zlib::GzipWriter.new(dst_file)
152
+ chunk.write_to(gzw)
153
+ ensure
154
+ gzw.close rescue nil if gzw
155
+ end
156
+ dst_file
157
+ end
158
+
159
+ def create_gz_file_from_json(dst_file, chunk, delimiter)
160
+ # fetch the table definition from redshift
161
+ redshift_table_columns = fetch_table_columns
162
+ if redshift_table_columns == nil
163
+ raise "failed to fetch the redshift table definition."
164
+ elsif redshift_table_columns.empty?
165
+ $log.warn format_log("no table on redshift. table_name=#{@redshift_tablename}")
166
+ return nil
167
+ end
168
+
169
+ # convert json to tsv format text
170
+ gzw = nil
171
+ begin
172
+ gzw = Zlib::GzipWriter.new(dst_file)
173
+ $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
174
+ chunk.msgpack_each do |record|
175
+ begin
176
+ tsv_text = json_to_table_text(redshift_table_columns, record, delimiter)
177
+ gzw.write(tsv_text) if tsv_text and not tsv_text.empty?
178
+ rescue => e
179
+ $log.error format_log("failed to create table text from json. text=(#{record[@record_log_tag]})"), :error=>$!.to_s
180
+ $log.error_backtrace
181
+ end
182
+ end
183
+ return nil unless gzw.pos > 0
184
+ ensure
185
+ gzw.close rescue nil if gzw
186
+ end
187
+ dst_file
188
+ end
189
+
190
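+ # Picks the default column delimiter for the given file_type when none is
+ # configured explicitly; json records are converted to tab-separated text.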
+ def determine_delimiter(file_type)
191
+ case file_type
192
+ when 'json', 'msgpack', 'tsv'
193
+ "\t"
194
+ when "csv"
195
+ ','
196
+ else
197
+ raise Fluent::ConfigError, "Invalid file_type:#{file_type}."
198
+ end
199
+ end
200
+
201
+ def fetch_table_columns
202
+ fetch_columns_sql = "select column_name from INFORMATION_SCHEMA.COLUMNS where table_name = '#{@redshift_tablename}' and table_schema = '#{@redshift_schemaname}' order by ordinal_position;"
203
+ conn = PG.connect(@db_conf)
204
+ begin
205
+ columns = nil
206
+ conn.exec(fetch_columns_sql) do |result|
207
+ columns = result.collect{|row| row['column_name']}
208
+ end
209
+ columns
210
+ ensure
211
+ conn.close rescue nil
212
+ end
213
+ end
214
+
215
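+ # Builds one delimiter-separated line from a JSON record, picking values in the
+ # order of the table's columns; keys with no matching column are dropped, and
+ # Hash/Array values are re-serialized as JSON strings.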
+ def json_to_table_text(redshift_table_columns, json_text, delimiter)
216
+ return "" if json_text.nil? or json_text.empty?
217
+
218
+ # parse json text
219
+ json_obj = nil
220
+ begin
221
+ json_obj = JSON.parse(json_text)
222
+ rescue => e
223
+ $log.warn format_log("failed to parse json. "), :error=>e.to_s
224
+ return ""
225
+ end
226
+ return "" unless json_obj
227
+
228
+ # extract values from json
229
+ val_list = redshift_table_columns.collect do |cn|
230
+ val = json_obj[cn]
231
+ val = nil unless val and not val.to_s.empty?
232
+ val = JSON.generate(val) if val.kind_of?(Hash) or val.kind_of?(Array)
233
+ val.to_s unless val.nil?
234
+ end
235
+ if val_list.all?{|v| v.nil? or v.empty?}
236
+ $log.warn format_log("no data match for table columns on redshift. json_text=#{json_text} table_columns=#{redshift_table_columns}")
237
+ return ""
238
+ end
239
+
240
+ generate_line_with_delimiter(val_list, delimiter)
241
+ end
242
+
243
+ def generate_line_with_delimiter(val_list, delimiter)
244
+ val_list = val_list.collect do |val|
245
+ if val.nil? or val.empty?
246
+ ""
247
+ else
248
+ val.gsub(/\\/, "\\\\\\").gsub(/\t/, "\\\t").gsub(/\n/, "\\\n") # escape tab, newline and backslash
249
+ end
250
+ end
251
+ val_list.join(delimiter) + "\n"
252
+ end
253
+
254
+ def create_s3path(bucket, path)
255
+ timestamp_key = (@utc) ? Time.now.utc.strftime(@timestamp_key_format) : Time.now.strftime(@timestamp_key_format)
256
+ i = 0
257
+ begin
258
+ suffix = "_#{'%02d' % i}"
259
+ s3path = "#{path}#{timestamp_key}#{suffix}.gz"
260
+ i += 1
261
+ end while bucket.objects[s3path].exists?
262
+ s3path
263
+ end
264
+
265
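+ # Creates a table named after the tag (qualified with redshift_schemaname when a
+ # non-public schema is configured, creating the schema if missing), declaring every
+ # column as varchar(varchar_length). Skips creation if the table already exists.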
+ def make_table_from_tag_name(tag, columns_arr)
266
+
267
+ conn = PG.connect(@db_conf)
268
+ sql = "SELECT table_name FROM INFORMATION_SCHEMA.TABLES WHERE table_name LIKE '#{tag}';"
269
+
270
+ cnt = 0
271
+ conn.exec(sql).each do |r|
272
+ cnt = cnt + 1
273
+ end
274
+
275
+ if cnt >= 1
276
+ return
277
+ end
278
+
279
+ cols = ""
280
+ for col_name in columns_arr do
281
+ cols = cols + "\"#{col_name}\" varchar(#{varchar_length}),"
282
+ end
283
+
284
+ len = cols.length
285
+ cols.slice!(len - 1)
286
+
287
+ if @redshift_schemaname && @redshift_schemaname != "public"
288
+ sql = "SELECT nspname FROM pg_namespace WHERE nspname LIKE '#{@redshift_schemaname}';"
289
+ cnt = 0
290
+ conn.exec(sql).each do |r|
291
+ cnt = cnt + 1
292
+ end
293
+
294
+ if cnt == 0
295
+ sql = "CREATE SCHEMA #{@redshift_schemaname}"
296
+ begin
297
+ conn.exec(sql)
298
+ rescue PGError => e
299
+ $log.error format_log("failed CREATE SCHEMA schema_name: #{@redshift_schemaname}")
300
+ $log.error format_log("class: " + e.class + " msg: " + e.message)
301
+ rescue => e
302
+ $log.error format_log("failed CREATE SCHEMA schema_name: #{@redshift_schemaname}")
303
+ $log.error format_log("class: " + e.class + " msg: " + e.message)
304
+ end
305
+ $log.info format_log("SCHEMA CREATED: => #{sql}")
306
+ end
307
+ table_name = "#{@redshift_schemaname}.#{tag}"
308
+ else
309
+ table_name = "#{tag}"
310
+ end
311
+
312
+ sql = "CREATE TABLE #{table_name} (#{cols});"
313
+ begin
314
+ conn.exec(sql)
315
+ rescue PGError => e
316
+ $log.error format_log("failed CREATE TABLE table_name: #{table_name}")
317
+ $log.error format_log("class: " + e.class + " msg: " + e.message)
318
+ rescue => e
319
+ $log.error format_log("failed CREATE TABLE table_name: #{table_name}")
320
+ $log.error format_log("class: " + e.class + " msg: " + e.message)
321
+ end
322
+ conn.close
323
+ $log.info format_log("TABLE CREATED: => #{sql}")
324
+ end
325
+
326
+
327
+ end
328
+
329
+
330
+ end
data/test/plugin/test_out_redshift_auto.rb ADDED
@@ -0,0 +1,526 @@
1
+ require 'test_helper'
2
+
3
+ require 'fluent/test'
4
+ require 'fluent/plugin/out_redshift_auto'
5
+ require 'flexmock/test_unit'
6
+ require 'zlib'
7
+
8
+
9
+ class RedshiftOutputTest < Test::Unit::TestCase
10
+ def setup
11
+ require 'aws-sdk'
12
+ require 'pg'
13
+ require 'csv'
14
+ Fluent::Test.setup
15
+ end
16
+
17
+ CONFIG_BASE= %[
18
+ aws_key_id test_key_id
19
+ aws_sec_key test_sec_key
20
+ s3_bucket test_bucket
21
+ path log
22
+ redshift_host test_host
23
+ redshift_dbname test_db
24
+ redshift_user test_user
25
+ redshift_password test_password
26
+ redshift_tablename test_table
27
+ buffer_type memory
28
+ utc
29
+ log_suffix id:5 host:localhost
30
+ ]
31
+ CONFIG_CSV= %[
32
+ #{CONFIG_BASE}
33
+ file_type csv
34
+ ]
35
+ CONFIG_TSV= %[
36
+ #{CONFIG_BASE}
37
+ file_type tsv
38
+ ]
39
+ CONFIG_JSON = %[
40
+ #{CONFIG_BASE}
41
+ file_type json
42
+ ]
43
+ CONFIG_JSON_WITH_SCHEMA = %[
44
+ #{CONFIG_BASE}
45
+ redshift_schemaname test_schema
46
+ file_type json
47
+ ]
48
+ CONFIG_MSGPACK = %[
49
+ #{CONFIG_BASE}
50
+ file_type msgpack
51
+ ]
52
+ CONFIG_PIPE_DELIMITER= %[
53
+ #{CONFIG_BASE}
54
+ delimiter |
55
+ ]
56
+ CONFIG_PIPE_DELIMITER_WITH_NAME= %[
57
+ #{CONFIG_BASE}
58
+ file_type pipe
59
+ delimiter |
60
+ ]
61
+ CONFIG=CONFIG_CSV
62
+
63
+ RECORD_CSV_A = {"log" => %[val_a,val_b,val_c,val_d]}
64
+ RECORD_CSV_B = {"log" => %[val_e,val_f,val_g,val_h]}
65
+ RECORD_TSV_A = {"log" => %[val_a\tval_b\tval_c\tval_d]}
66
+ RECORD_TSV_B = {"log" => %[val_e\tval_f\tval_g\tval_h]}
67
+ RECORD_JSON_A = {"log" => %[{"key_a" : "val_a", "key_b" : "val_b"}]}
68
+ RECORD_JSON_B = {"log" => %[{"key_c" : "val_c", "key_d" : "val_d"}]}
69
+ RECORD_MSGPACK_A = {"key_a" => "val_a", "key_b" => "val_b"}
70
+ RECORD_MSGPACK_B = {"key_c" => "val_c", "key_d" => "val_d"}
71
+ DEFAULT_TIME = Time.parse("2013-03-06 12:15:02 UTC").to_i
72
+
73
+ def create_driver(conf = CONFIG, tag='test.input')
74
+ Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag).configure(conf)
75
+ end
76
+
77
+ def create_driver_no_write(conf = CONFIG, tag='test.input')
78
+ Fluent::Test::BufferedOutputTestDriver.new(Fluent::RedshiftOutput, tag) do
79
+ def write(chunk)
80
+ chunk.read
81
+ end
82
+ end.configure(conf)
83
+ end
84
+
85
+ def test_configure
86
+ assert_raise(Fluent::ConfigError) {
87
+ d = create_driver('')
88
+ }
89
+ assert_raise(Fluent::ConfigError) {
90
+ d = create_driver(CONFIG_BASE)
91
+ }
92
+ d = create_driver(CONFIG_CSV)
93
+ assert_equal "test_key_id", d.instance.aws_key_id
94
+ assert_equal "test_sec_key", d.instance.aws_sec_key
95
+ assert_equal "test_bucket", d.instance.s3_bucket
96
+ assert_equal "log/", d.instance.path
97
+ assert_equal "test_host", d.instance.redshift_host
98
+ assert_equal 5439, d.instance.redshift_port
99
+ assert_equal "test_db", d.instance.redshift_dbname
100
+ assert_equal "test_user", d.instance.redshift_user
101
+ assert_equal "test_password", d.instance.redshift_password
102
+ assert_equal "test_table", d.instance.redshift_tablename
103
+ assert_equal nil, d.instance.redshift_schemaname
104
+ assert_equal "FILLRECORD ACCEPTANYDATE TRUNCATECOLUMNS", d.instance.redshift_copy_base_options
105
+ assert_equal nil, d.instance.redshift_copy_options
106
+ assert_equal "csv", d.instance.file_type
107
+ assert_equal ",", d.instance.delimiter
108
+ assert_equal true, d.instance.utc
109
+ end
110
+ def test_configure_with_schemaname
111
+ d = create_driver(CONFIG_JSON_WITH_SCHEMA)
112
+ assert_equal "test_schema", d.instance.redshift_schemaname
113
+ end
114
+ def test_configure_localtime
115
+ d = create_driver(CONFIG_CSV.gsub(/ *utc */, ''))
116
+ assert_equal false, d.instance.utc
117
+ end
118
+ def test_configure_no_path
119
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, ''))
120
+ assert_equal "", d.instance.path
121
+ end
122
+ def test_configure_root_path
123
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /'))
124
+ assert_equal "", d.instance.path
125
+ end
126
+ def test_configure_path_with_slash
127
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path log/'))
128
+ assert_equal "log/", d.instance.path
129
+ end
130
+ def test_configure_path_starts_with_slash
131
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log/'))
132
+ assert_equal "log/", d.instance.path
133
+ end
134
+ def test_configure_path_starts_with_slash_without_last_slash
135
+ d = create_driver(CONFIG_CSV.gsub(/ *path *.+$/, 'path /log'))
136
+ assert_equal "log/", d.instance.path
137
+ end
138
+ def test_configure_tsv
139
+ d1 = create_driver(CONFIG_TSV)
140
+ assert_equal "tsv", d1.instance.file_type
141
+ assert_equal "\t", d1.instance.delimiter
142
+ end
143
+ def test_configure_json
144
+ d2 = create_driver(CONFIG_JSON)
145
+ assert_equal "json", d2.instance.file_type
146
+ assert_equal "\t", d2.instance.delimiter
147
+ end
148
+ def test_configure_msgpack
149
+ d2 = create_driver(CONFIG_MSGPACK)
150
+ assert_equal "msgpack", d2.instance.file_type
151
+ assert_equal "\t", d2.instance.delimiter
152
+ end
153
+ def test_configure_original_file_type
154
+ d3 = create_driver(CONFIG_PIPE_DELIMITER)
155
+ assert_equal nil, d3.instance.file_type
156
+ assert_equal "|", d3.instance.delimiter
157
+
158
+ d4 = create_driver(CONFIG_PIPE_DELIMITER_WITH_NAME)
159
+ assert_equal "pipe", d4.instance.file_type
160
+ assert_equal "|", d4.instance.delimiter
161
+ end
162
+ def test_configure_no_log_suffix
163
+ d = create_driver(CONFIG_CSV.gsub(/ *log_suffix *.+$/, ''))
164
+ assert_equal "", d.instance.log_suffix
165
+ end
166
+
167
+ def emit_csv(d)
168
+ d.emit(RECORD_CSV_A, DEFAULT_TIME)
169
+ d.emit(RECORD_CSV_B, DEFAULT_TIME)
170
+ end
171
+ def emit_tsv(d)
172
+ d.emit(RECORD_TSV_A, DEFAULT_TIME)
173
+ d.emit(RECORD_TSV_B, DEFAULT_TIME)
174
+ end
175
+ def emit_json(d)
176
+ d.emit(RECORD_JSON_A, DEFAULT_TIME)
177
+ d.emit(RECORD_JSON_B, DEFAULT_TIME)
178
+ end
179
+ def emit_msgpack(d)
180
+ d.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
181
+ d.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
182
+ end
183
+
184
+ def test_format_csv
185
+ d_csv = create_driver_no_write(CONFIG_CSV)
186
+ emit_csv(d_csv)
187
+ d_csv.expect_format RECORD_CSV_A['log'] + "\n"
188
+ d_csv.expect_format RECORD_CSV_B['log'] + "\n"
189
+ d_csv.run
190
+ end
191
+ def test_format_tsv
192
+ d_tsv = create_driver_no_write(CONFIG_TSV)
193
+ emit_tsv(d_tsv)
194
+ d_tsv.expect_format RECORD_TSV_A['log'] + "\n"
195
+ d_tsv.expect_format RECORD_TSV_B['log'] + "\n"
196
+ d_tsv.run
197
+ end
198
+ def test_format_json
199
+ d_json = create_driver_no_write(CONFIG_JSON)
200
+ emit_json(d_json)
201
+ d_json.expect_format RECORD_JSON_A.to_msgpack
202
+ d_json.expect_format RECORD_JSON_B.to_msgpack
203
+ d_json.run
204
+ end
205
+
206
+ def test_format_msgpack
207
+ d_msgpack = create_driver_no_write(CONFIG_MSGPACK)
208
+ emit_msgpack(d_msgpack)
209
+ d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_A }.to_msgpack)
210
+ d_msgpack.expect_format({ 'log' => RECORD_MSGPACK_B }.to_msgpack)
211
+ d_msgpack.run
212
+ end
213
+
214
+ class PGConnectionMock
215
+ def initialize(options = {})
216
+ @return_keys = options[:return_keys] || ['key_a', 'key_b', 'key_c', 'key_d', 'key_e', 'key_f', 'key_g', 'key_h']
217
+ @target_schema = options[:schemaname] || nil
218
+ @target_table = options[:tablename] || 'test_table'
219
+ end
220
+
221
+ def expected_column_list_query
222
+ if @target_schema
223
+ /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_schema = '#{@target_schema}' and table_name = '#{@target_table}'/
224
+ else
225
+ /\Aselect column_name from INFORMATION_SCHEMA.COLUMNS where table_name = '#{@target_table}'/
226
+ end
227
+ end
228
+
229
+ def expected_copy_query
230
+ if @target_schema
231
+ /\Acopy #{@target_schema}.#{@target_table} from/
232
+ else
233
+ /\Acopy #{@target_table} from/
234
+ end
235
+ end
236
+
237
+ def exec(sql, &block)
238
+ if block_given?
239
+ if sql =~ expected_column_list_query
240
+ yield @return_keys.collect{|key| {'column_name' => key}}
241
+ else
242
+ yield []
243
+ end
244
+ else
245
+ unless sql =~ expected_copy_query
246
+ error = PG::Error.new("ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details.")
247
+ error.result = "ERROR: Load into table '#{@target_table}' failed. Check 'stl_load_errors' system table for details."
248
+ raise error
249
+ end
250
+ end
251
+ end
252
+
253
+ def close
254
+ end
255
+ end
256
+
257
+ def setup_pg_mock
258
+ # create mock of PG
259
+ def PG.connect(dbinfo)
260
+ return PGConnectionMock.new
261
+ end
262
+ end
263
+
264
+ def setup_s3_mock(expected_data)
265
+ current_time = Time.now
266
+
267
+ # create mock of s3 object
268
+ s3obj = flexmock(AWS::S3::S3Object)
269
+ s3obj.should_receive(:exists?).with_any_args.and_return { false }
270
+ s3obj.should_receive(:write).with(
271
+ # pathname
272
+ on { |pathname|
273
+ data = nil
274
+ pathname.open { |f|
275
+ gz = Zlib::GzipReader.new(f)
276
+ data = gz.read
277
+ gz.close
278
+ }
279
+ assert_equal expected_data, data
280
+ },
281
+ :acl => :bucket_owner_full_control
282
+ ).and_return { true }
283
+
284
+ # create mock of s3 object collection
285
+ s3obj_col = flexmock(AWS::S3::ObjectCollection)
286
+ s3obj_col.should_receive(:[]).with(
287
+ on { |key|
288
+ expected_key = current_time.utc.strftime("log/year=%Y/month=%m/day=%d/hour=%H/%Y%m%d-%H%M_00.gz")
289
+ key == expected_key
290
+ }).
291
+ and_return {
292
+ s3obj
293
+ }
294
+
295
+ # create mock of s3 bucket
296
+ flexmock(AWS::S3::Bucket).new_instances do |bucket|
297
+ bucket.should_receive(:objects).with_any_args.
298
+ and_return {
299
+ s3obj_col
300
+ }
301
+ end
302
+ end
303
+
304
+ def setup_tempfile_mock_to_be_closed
305
+ flexmock(Tempfile).new_instances.should_receive(:close!).at_least.once
306
+ end
307
+
308
+ def setup_mocks(expected_data)
309
+ setup_pg_mock
310
+ setup_s3_mock(expected_data)
+ end
311
+
312
+ def test_write_with_csv
313
+ setup_mocks(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
314
+ setup_tempfile_mock_to_be_closed
315
+ d_csv = create_driver
316
+ emit_csv(d_csv)
317
+ assert_equal true, d_csv.run
318
+ end
319
+
320
+ def test_write_with_json
321
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
322
+ setup_tempfile_mock_to_be_closed
323
+ d_json = create_driver(CONFIG_JSON)
324
+ emit_json(d_json)
325
+ assert_equal true, d_json.run
326
+ end
327
+
328
+ def test_write_with_json_hash_value
329
+ setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
330
+ d_json = create_driver(CONFIG_JSON)
331
+ d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : {"foo" : "var"}}]} , DEFAULT_TIME)
332
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
333
+ assert_equal true, d_json.run
334
+ end
335
+
336
+ def test_write_with_json_array_value
337
+ setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
338
+ d_json = create_driver(CONFIG_JSON)
339
+ d_json.emit({"log" => %[{"key_a" : "val_a", "key_b" : ["foo", "var"]}]} , DEFAULT_TIME)
340
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
341
+ assert_equal true, d_json.run
342
+ end
343
+
344
+ def test_write_with_json_including_tab_newline_quote
345
+ setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
346
+ d_json = create_driver(CONFIG_JSON)
347
+ d_json.emit({"log" => %[{"key_a" : "val_a_with_\\t_tab_\\n_newline", "key_b" : "val_b_with_\\\\_quote"}]} , DEFAULT_TIME)
348
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
349
+ assert_equal true, d_json.run
350
+ end
351
+
352
+ def test_write_with_json_no_data
353
+ setup_mocks("")
354
+ d_json = create_driver(CONFIG_JSON)
355
+ d_json.emit("", DEFAULT_TIME)
356
+ d_json.emit("", DEFAULT_TIME)
357
+ assert_equal false, d_json.run
358
+ end
359
+
360
+ def test_write_with_json_invalid_one_line
361
+ setup_mocks(%[\t\tval_c\tval_d\t\t\t\t\n])
362
+ d_json = create_driver(CONFIG_JSON)
363
+ d_json.emit({"log" => %[}}]}, DEFAULT_TIME)
364
+ d_json.emit(RECORD_JSON_B, DEFAULT_TIME)
365
+ assert_equal true, d_json.run
366
+ end
367
+
368
+ def test_write_with_json_no_available_data
369
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
370
+ d_json = create_driver(CONFIG_JSON)
371
+ d_json.emit(RECORD_JSON_A, DEFAULT_TIME)
372
+ d_json.emit({"log" => %[{"key_o" : "val_o", "key_p" : "val_p"}]}, DEFAULT_TIME)
373
+ assert_equal true, d_json.run
374
+ end
375
+
376
+ def test_write_with_msgpack
377
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
378
+ d_msgpack = create_driver(CONFIG_MSGPACK)
379
+ emit_msgpack(d_msgpack)
380
+ assert_equal true, d_msgpack.run
381
+ end
382
+
383
+ def test_write_with_msgpack_hash_value
384
+ setup_mocks("val_a\t{\"foo\":\"var\"}\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
385
+ d_msgpack = create_driver(CONFIG_MSGPACK)
386
+ d_msgpack.emit({"key_a" => "val_a", "key_b" => {"foo" => "var"}} , DEFAULT_TIME)
387
+ d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
388
+ assert_equal true, d_msgpack.run
389
+ end
390
+
391
+ def test_write_with_msgpack_array_value
392
+ setup_mocks("val_a\t[\"foo\",\"var\"]\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
393
+ d_msgpack = create_driver(CONFIG_MSGPACK)
394
+ d_msgpack.emit({"key_a" => "val_a", "key_b" => ["foo", "var"]} , DEFAULT_TIME)
395
+ d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
396
+ assert_equal true, d_msgpack.run
397
+ end
398
+
399
+ def test_write_with_msgpack_including_tab_newline_quote
400
+ setup_mocks("val_a_with_\\\t_tab_\\\n_newline\tval_b_with_\\\\_quote\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n")
401
+ d_msgpack = create_driver(CONFIG_MSGPACK)
402
+ d_msgpack.emit({"key_a" => "val_a_with_\t_tab_\n_newline", "key_b" => "val_b_with_\\_quote"} , DEFAULT_TIME)
403
+ d_msgpack.emit(RECORD_MSGPACK_B, DEFAULT_TIME)
404
+ assert_equal true, d_msgpack.run
405
+ end
406
+
407
+ def test_write_with_msgpack_no_data
408
+ setup_mocks("")
409
+ d_msgpack = create_driver(CONFIG_MSGPACK)
410
+ d_msgpack.emit({}, DEFAULT_TIME)
411
+ d_msgpack.emit({}, DEFAULT_TIME)
412
+ assert_equal false, d_msgpack.run
413
+ end
414
+
415
+ def test_write_with_msgpack_no_available_data
416
+ setup_mocks(%[val_a\tval_b\t\t\t\t\t\t\n])
417
+ d_msgpack = create_driver(CONFIG_MSGPACK)
418
+ d_msgpack.emit(RECORD_MSGPACK_A, DEFAULT_TIME)
419
+ d_msgpack.emit({"key_o" => "val_o", "key_p" => "val_p"}, DEFAULT_TIME)
420
+ assert_equal true, d_msgpack.run
421
+ end
422
+
423
+ def test_write_redshift_connection_error
424
+ def PG.connect(dbinfo)
425
+ return Class.new do
426
+ def initialize(return_keys=[]); end
427
+ def exec(sql)
428
+ raise PG::Error, "redshift connection error"
429
+ end
430
+ def close; end
431
+ end.new
432
+ end
433
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
434
+
435
+ d_csv = create_driver
436
+ emit_csv(d_csv)
437
+ assert_raise(PG::Error) {
438
+ d_csv.run
439
+ }
440
+ end
441
+
442
+ def test_write_redshift_load_error
443
+ PG::Error.module_eval { attr_accessor :result}
444
+ def PG.connect(dbinfo)
445
+ return Class.new do
446
+ def initialize(return_keys=[]); end
447
+ def exec(sql)
448
+ error = PG::Error.new("ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details.")
449
+ error.result = "ERROR: Load into table 'apache_log' failed. Check 'stl_load_errors' system table for details."
450
+ raise error
451
+ end
452
+ def close; end
453
+ end.new
454
+ end
455
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
456
+
457
+ d_csv = create_driver
458
+ emit_csv(d_csv)
459
+ assert_equal false, d_csv.run
460
+ end
461
+
462
+ def test_write_with_json_redshift_connection_error
463
+ def PG.connect(dbinfo)
464
+ return Class.new do
465
+ def initialize(return_keys=[]); end
466
+ def exec(sql, &block)
467
+ error = PG::Error.new("redshift connection error")
468
+ raise error
469
+ end
470
+ def close; end
471
+ end.new
472
+ end
473
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
474
+
475
+ d_json = create_driver(CONFIG_JSON)
476
+ emit_json(d_json)
477
+ assert_raise(PG::Error) {
478
+ d_json.run
479
+ }
480
+ end
481
+
482
+ def test_write_with_json_no_table_on_redshift
483
+ def PG.connect(dbinfo)
484
+ return Class.new do
485
+ def initialize(return_keys=[]); end
486
+ def exec(sql, &block)
487
+ yield [] if block_given?
488
+ end
489
+ def close; end
490
+ end.new
491
+ end
492
+ setup_s3_mock(%[val_a,val_b,val_c,val_d\nval_e,val_f,val_g,val_h\n])
493
+
494
+ d_json = create_driver(CONFIG_JSON)
495
+ emit_json(d_json)
496
+ assert_equal false, d_json.run
497
+ end
498
+
499
+ def test_write_with_json_failed_to_get_columns
500
+ def PG.connect(dbinfo)
501
+ return Class.new do
502
+ def initialize(return_keys=[]); end
503
+ def exec(sql, &block)
504
+ end
505
+ def close; end
506
+ end.new
507
+ end
508
+ setup_s3_mock("")
509
+
510
+ d_json = create_driver(CONFIG_JSON)
511
+ emit_json(d_json)
512
+ assert_raise(RuntimeError, "failed to fetch the redshift table definition.") {
513
+ d_json.run
514
+ }
515
+ end
516
+
517
+ def test_write_with_json_fetch_column_with_schema
518
+ def PG.connect(dbinfo)
519
+ return PGConnectionMock.new(:schemaname => 'test_schema')
520
+ end
521
+ setup_s3_mock(%[val_a\tval_b\t\t\t\t\t\t\n\t\tval_c\tval_d\t\t\t\t\n])
522
+ d_json = create_driver(CONFIG_JSON_WITH_SCHEMA)
523
+ emit_json(d_json)
524
+ assert_equal true, d_json.run
525
+ end
526
+ end
data/test/test_helper.rb ADDED
@@ -0,0 +1,8 @@
1
+ if ENV['COVERAGE']
2
+ require 'simplecov'
3
+ SimpleCov.start do
4
+ add_filter 'test/'
5
+ add_filter 'pkg/'
6
+ add_filter 'vendor/'
7
+ end
8
+ end
metadata ADDED
@@ -0,0 +1,137 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: fluent-plugin-redshift-anton
3
+ version: !ruby/object:Gem::Version
4
+ version: 1.0.1
5
+ platform: ruby
6
+ authors:
7
+ - Anton Kuchinsky
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2015-04-11 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: fluentd
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - '>='
18
+ - !ruby/object:Gem::Version
19
+ version: 0.10.0
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - '>='
25
+ - !ruby/object:Gem::Version
26
+ version: 0.10.0
27
+ - !ruby/object:Gem::Dependency
28
+ name: aws-sdk
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - '>='
32
+ - !ruby/object:Gem::Version
33
+ version: 1.6.3
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - '>='
39
+ - !ruby/object:Gem::Version
40
+ version: 1.6.3
41
+ - !ruby/object:Gem::Dependency
42
+ name: pg
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - '>='
46
+ - !ruby/object:Gem::Version
47
+ version: 0.14.0
48
+ type: :runtime
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - '>='
53
+ - !ruby/object:Gem::Version
54
+ version: 0.14.0
55
+ - !ruby/object:Gem::Dependency
56
+ name: rake
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - '>='
60
+ - !ruby/object:Gem::Version
61
+ version: '0'
62
+ type: :development
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - '>='
67
+ - !ruby/object:Gem::Version
68
+ version: '0'
69
+ - !ruby/object:Gem::Dependency
70
+ name: simplecov
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - '>='
74
+ - !ruby/object:Gem::Version
75
+ version: 0.5.4
76
+ type: :development
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - '>='
81
+ - !ruby/object:Gem::Version
82
+ version: 0.5.4
83
+ - !ruby/object:Gem::Dependency
84
+ name: flexmock
85
+ requirement: !ruby/object:Gem::Requirement
86
+ requirements:
87
+ - - '>='
88
+ - !ruby/object:Gem::Version
89
+ version: 1.3.1
90
+ type: :development
91
+ prerelease: false
92
+ version_requirements: !ruby/object:Gem::Requirement
93
+ requirements:
94
+ - - '>='
95
+ - !ruby/object:Gem::Version
96
+ version: 1.3.1
97
+ description: Amazon Redshift output plugin for Fluentd with creating table
98
+ email:
99
+ - akuchinsky@gmail.com
100
+ executables: []
101
+ extensions: []
102
+ extra_rdoc_files: []
103
+ files:
104
+ - Gemfile
105
+ - README.md
106
+ - Rakefile
107
+ - VERSION
108
+ - fluent-plugin-redshift-anton.gemspec
109
+ - lib/fluent/plugin/out_redshift_auto.rb
110
+ - test/plugin/test_out_redshift_auto.rb
111
+ - test/test_helper.rb
112
+ homepage: https://github.com/akuchins/fluent-plugin-redshift-anton
113
+ licenses: []
114
+ metadata: {}
115
+ post_install_message:
116
+ rdoc_options: []
117
+ require_paths:
118
+ - lib
119
+ required_ruby_version: !ruby/object:Gem::Requirement
120
+ requirements:
121
+ - - '>='
122
+ - !ruby/object:Gem::Version
123
+ version: '0'
124
+ required_rubygems_version: !ruby/object:Gem::Requirement
125
+ requirements:
126
+ - - '>='
127
+ - !ruby/object:Gem::Version
128
+ version: '0'
129
+ requirements: []
130
+ rubyforge_project:
131
+ rubygems_version: 2.0.14
132
+ signing_key:
133
+ specification_version: 4
134
+ summary: Amazon Redshift output plugin for Fluentd with creating table
135
+ test_files:
136
+ - test/plugin/test_out_redshift_auto.rb
137
+ - test/test_helper.rb