fluent-plugin-mysql-2 0.3.7
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +18 -0
- data/.travis.yml +14 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +13 -0
- data/README.md +350 -0
- data/README_mysql.md +140 -0
- data/Rakefile +11 -0
- data/fluent-plugin-mysql.gemspec +24 -0
- data/lib/fluent/plugin/out_mysql.rb +126 -0
- data/lib/fluent/plugin/out_mysql_bulk.rb +234 -0
- data/test/helper.rb +29 -0
- data/test/plugin/test_out_mysql.rb +199 -0
- data/test/plugin/test_out_mysql_bulk.rb +360 -0
- metadata +167 -0
@@ -0,0 +1,126 @@
|
|
1
|
+
# Deprecated fluentd output plugin that writes each event as one row via a
# user-supplied (or generated) parameterized INSERT statement.
# Superseded by out_mysql_bulk; configure emits a deprecation warning.
class Fluent::MysqlOutput < Fluent::BufferedOutput
  Fluent::Plugin.register_output('mysql', self)

  include Fluent::SetTimeKeyMixin
  include Fluent::SetTagKeyMixin

  config_param :host, :string
  config_param :port, :integer, :default => nil
  config_param :database, :string
  config_param :username, :string
  config_param :password, :string, :default => '', :secret => true
  config_param :sslkey, :string, :default => nil
  config_param :sslcert, :string, :default => nil
  config_param :sslca, :string, :default => nil
  config_param :sslcapath, :string, :default => nil
  config_param :sslcipher, :string, :default => nil
  config_param :sslverify, :bool, :default => nil

  config_param :key_names, :string, :default => nil # nil allowed for json format
  config_param :sql, :string, :default => nil
  config_param :table, :string, :default => nil
  config_param :columns, :string, :default => nil

  config_param :format, :string, :default => "raw" # or json

  attr_accessor :handler

  def initialize
    super
    require 'mysql2-cs-bind'
    require 'jsonpath'
  end

  # Define `log` method for v0.10.42 or earlier
  unless method_defined?(:log)
    define_method("log") { $log }
  end

  # Validates the config and builds @format_proc / @sql.
  # Either `sql` or `table`+`columns` must be given (exactly one of them);
  # raises Fluent::ConfigError otherwise, or when the number of `?`
  # placeholders in `sql` does not match `key_names`.
  def configure(conf)
    super

    log.warn "[mysql] This plugin deprecated. You should use mysql_bulk."

    # TODO tag_mapped

    case @format
    when 'json'
      # whole record serialized into a single placeholder
      @format_proc = Proc.new{|tag, time, record| record.to_json}
    when 'jsonpath'
      # each key_name is a JsonPath expression evaluated against the record
      @key_names = @key_names.split(/\s*,\s*/)
      @format_proc = Proc.new do |tag, time, record|
        json = record.to_json
        @key_names.map do |k|
          JsonPath.new(k.strip).on(json).first
        end
      end
    else
      # 'raw': plain hash lookup per key name
      @key_names = @key_names.split(/\s*,\s*/)
      @format_proc = Proc.new{|tag, time, record| @key_names.map{|k| record[k]}}
    end

    if @columns.nil? and @sql.nil?
      raise Fluent::ConfigError, "columns or sql MUST be specified, but missing"
    end
    if @columns and @sql
      raise Fluent::ConfigError, "both of columns and sql are specified, but specify one of them"
    end

    if @sql
      # Dry-run bind with nil values only to verify placeholder arity.
      begin
        if @format == 'json'
          Mysql2::Client.pseudo_bind(@sql, [nil])
        else
          Mysql2::Client.pseudo_bind(@sql, @key_names.map{|n| nil})
        end
      rescue ArgumentError => e
        raise Fluent::ConfigError, "mismatch between sql placeholders and key_names"
      end
    else # columns
      raise Fluent::ConfigError, "table missing" unless @table
      @columns = @columns.split(/\s*,\s*/)
      cols = @columns.join(',')
      placeholders = if @format == 'json'
                       '?'
                     else
                       @key_names.map{|k| '?'}.join(',')
                     end
      @sql = "INSERT INTO #{@table} (#{cols}) VALUES (#{placeholders})"
    end
  end

  def start
    super
  end

  def shutdown
    super
  end

  # Serializes one event for buffering: [tag, time, formatted-values].
  def format(tag, time, record)
    [tag, time, @format_proc.call(tag, time, record)].to_msgpack
  end

  # Builds a fresh MySQL connection per write; overridden in tests.
  def client
    Mysql2::Client.new({
      :host => @host, :port => @port,
      :username => @username, :password => @password,
      :database => @database,
      :sslkey => @sslkey,
      :sslcert => @sslcert,
      :sslca => @sslca,
      :sslcapath => @sslcapath,
      :sslcipher => @sslcipher,
      :sslverify => @sslverify,
      :flags => Mysql2::Client::MULTI_STATEMENTS,
    })
  end

  # Executes @sql once per buffered event.
  # FIX: close the connection in `ensure` — previously a failing xquery
  # leaked the Mysql2::Client handle on every retried chunk.
  def write(chunk)
    handler = self.client
    begin
      chunk.msgpack_each { |tag, time, data|
        handler.xquery(@sql, data)
      }
    ensure
      handler.close
    end
  end
end
|
@@ -0,0 +1,234 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require 'fluent/plugin/output'
|
4
|
+
require 'oj'
|
5
|
+
|
6
|
+
module Fluent::Plugin
  # Output plugin that batches a chunk's events into multi-row
  # `INSERT ... VALUES (...),(...)` statements, with optional
  # INSERT IGNORE / ON DUPLICATE KEY UPDATE handling.
  class MysqlBulkOutput < Output
    Fluent::Plugin.register_output('mysql_bulk', self)

    helpers :compat_parameters, :inject

    config_param :host, :string, default: '127.0.0.1',
                 desc: "Database host."
    config_param :port, :integer, default: 3306,
                 desc: "Database port."
    config_param :database, :string,
                 desc: "Database name."
    config_param :username, :string,
                 desc: "Database user."
    config_param :password, :string, default: '', secret: true,
                 desc: "Database password."
    config_param :sslkey, :string, default: nil,
                 desc: "SSL key."
    config_param :sslcert, :string, default: nil,
                 desc: "SSL cert."
    config_param :sslca, :string, default: nil,
                 desc: "SSL CA."
    config_param :sslcapath, :string, default: nil,
                 desc: "SSL CA path."
    config_param :sslcipher, :string, default: nil,
                 desc: "SSL cipher."
    config_param :sslverify, :bool, default: nil,
                 desc: "SSL Verify Server Certificate."

    config_param :column_names, :string,
                 desc: "Bulk insert column."
    config_param :key_names, :string, default: nil,
                 desc: <<-DESC
Value key names, ${time} is placeholder Time.at(time).strftime("%Y-%m-%d %H:%M:%S").
DESC
    config_param :json_key_names, :string, default: nil,
                 desc: "Key names which store data as json"
    config_param :table, :string,
                 desc: "Bulk insert table."

    config_param :unixtimestamp_key_names, :string, default: nil,
                 desc: "Key names which store data as datetime from unix time stamp"

    config_param :on_duplicate_key_update, :bool, default: false,
                 desc: "On duplicate key update enable."
    config_param :on_duplicate_update_keys, :string, default: nil,
                 desc: "On duplicate key update column, comma separator."
    config_param :on_duplicate_update_custom_values, :string, default: nil,
                 desc: "On_duplicate_update_custom_values, comma separator. specify the column name is insert value, custom value is use ${sql conditions}"
    config_param :insert_ignore, :bool, default: false,
                 desc: "Use INSERT IGNORE"
    config_param :max_rows_per_insert, :integer, default: 0,
                 desc: "Maximum number of rows to insert in each statement"

    config_param :transaction_isolation_level, :enum, list: [:read_uncommitted, :read_committed, :repeatable_read, :serializable], default: nil,
                 desc: "Set transaction isolation level."

    attr_accessor :handler

    def initialize
      super
      require 'mysql2-cs-bind'
    end

    # Validates config and precomputes the column list, the per-row
    # "(?,?,...)" values template, and (optionally) the
    # ON DUPLICATE KEY UPDATE clause.
    def configure(conf)
      compat_parameters_convert(conf, :buffer, :inject)
      super

      if @column_names.nil?
        fail Fluent::ConfigError, 'column_names MUST specified, but missing'
      end

      if @on_duplicate_key_update
        if @on_duplicate_update_keys.nil?
          fail Fluent::ConfigError, 'on_duplicate_key_update = true , on_duplicate_update_keys nil!'
        end
        @on_duplicate_update_keys = @on_duplicate_update_keys.split(',')

        if !@on_duplicate_update_custom_values.nil?
          @on_duplicate_update_custom_values = @on_duplicate_update_custom_values.split(',')
          if @on_duplicate_update_custom_values.length != @on_duplicate_update_keys.length
            fail Fluent::ConfigError, <<-DESC
on_duplicate_update_keys and on_duplicate_update_custom_values must be the same length
DESC
          end
        end

        @on_duplicate_key_update_sql = ' ON DUPLICATE KEY UPDATE '
        updates = []
        @on_duplicate_update_keys.each_with_index do |update_column, i|
          if @on_duplicate_update_custom_values.nil? || @on_duplicate_update_custom_values[i] == "#{update_column}"
            # plain column: take the value from the attempted insert
            updates << "#{update_column} = VALUES(#{update_column})"
          else
            # custom value given as ${...}: extract and escape the SQL fragment
            value = @on_duplicate_update_custom_values[i].to_s.match(/\${(.*)}/)[1]
            escape_value = Mysql2::Client.escape(value)
            updates << "#{update_column} = #{escape_value}"
          end
        end
        @on_duplicate_key_update_sql += updates.join(',')
      end

      @column_names = @column_names.split(',').collect(&:strip)
      @key_names = @key_names.nil? ? @column_names : @key_names.split(',').collect(&:strip)
      @values_template = "(#{ @column_names.map { |key| '?' }.join(',') })"
      # backtick-quote column names; `` escapes embedded backticks
      @insert_columns = @column_names.map{|x| "`#{x.to_s.gsub('`', '``')}`"}.join(',')
      @json_key_names = @json_key_names.split(',') if @json_key_names
      @unixtimestamp_key_names = @unixtimestamp_key_names.split(',') if @unixtimestamp_key_names
    end

    # Returns, per configured column, the max char length from the table's
    # (var)char definition, or nil for non-string / unknown columns.
    # Used by format_proc to truncate values that would overflow.
    def check_table_schema(database: @database, table: @table)
      _client = client(database)
      result = _client.xquery("SHOW COLUMNS FROM #{table}")
      max_lengths = []
      @column_names.each do |column|
        info = result.select { |x| x['Field'] == column }.first
        r = /(char|varchar)\(([\d]+)\)/
        begin
          max_length = info['Type'].scan(r)[0][1].to_i
        rescue
          # column missing or not a (var)char type: no truncation
          max_length = nil
        end
        max_lengths << max_length
      end
      max_lengths
    ensure
      _client.close unless _client.nil?
    end

    # Buffers the raw record (after inject) as [tag, time, record].
    def format(tag, time, record)
      record = inject_values_to_record(tag, time, record)
      [tag, time, record].to_msgpack
    end

    def formatted_to_msgpack_binary
      true
    end

    def multi_workers_ready?
      true
    end

    # Builds a fresh MySQL connection for the given database.
    def client(database)
      Mysql2::Client.new(
        host: @host,
        port: @port,
        username: @username,
        password: @password,
        database: database,
        sslkey: @sslkey,
        sslcert: @sslcert,
        sslca: @sslca,
        sslcapath: @sslcapath,
        sslcipher: @sslcipher,
        sslverify: @sslverify,
        flags: Mysql2::Client::MULTI_STATEMENTS
      )
    end

    # Resolves buffer-chunk placeholders in database/table names;
    # dots are replaced since they would break the unquoted identifiers.
    def expand_placeholders(metadata)
      database = extract_placeholders(@database, metadata).gsub('.', '_')
      table = extract_placeholders(@table, metadata).gsub('.', '_')
      return database, table
    end

    # Writes one chunk as one or more bulk INSERT statements.
    #
    # Fixes over the previous revision:
    # - each_slice body interpolated `values.join(',')` instead of
    #   `slice.join(',')`, so with max_rows_per_insert > 0 every slice
    #   re-inserted the WHOLE chunk (duplicate rows, oversized statements).
    # - an empty chunk made slice_size == 0 and each_slice(0) raised
    #   ArgumentError; now guarded.
    # - the connection is closed in `ensure`, so it is no longer leaked
    #   when a statement fails and the chunk is retried.
    def write(chunk)
      database, table = expand_placeholders(chunk.metadata)
      max_lengths = check_table_schema(database: database, table: table)
      @handler = client(database)
      begin
        values = []
        chunk.msgpack_each do |tag, time, data|
          data = format_proc.call(tag, time, data, max_lengths)
          values << Mysql2::Client.pseudo_bind(@values_template, data)
        end
        return if values.empty?

        @handler.query("SET SESSION TRANSACTION ISOLATION LEVEL #{transaction_isolation_level}") if @transaction_isolation_level
        slice_size = @max_rows_per_insert > 0 ? @max_rows_per_insert : values.length
        values.each_slice(slice_size) do |slice|
          sql = "INSERT #{@insert_ignore ? "IGNORE" : ""} INTO #{table} (#{@insert_columns}) VALUES #{slice.join(',')}"
          sql += @on_duplicate_key_update_sql if @on_duplicate_key_update

          @handler.xquery(sql)
        end
        log.info "bulk insert values size (table: #{@table}) => #{values.size}"
      ensure
        @handler.close
      end
    end

    private

    # Returns a proc mapping (tag, time, record, max_lengths) to the row's
    # value array, applying ${time} substitution, (var)char truncation,
    # JSON serialization and unix-timestamp conversion per configuration.
    def format_proc
      proc do |tag, time, record, max_lengths|
        values = []
        @key_names.each_with_index do |key, i|
          if key == '${time}'
            value = Time.at(time).strftime('%Y-%m-%d %H:%M:%S')
          else
            if max_lengths[i].nil? || record[key].nil?
              value = record[key]
            else
              # truncate to the column's declared (var)char length
              value = record[key].to_s.slice(0, max_lengths[i])
            end

            if @json_key_names && @json_key_names.include?(key)
              value = Oj.dump(value)
            end

            if @unixtimestamp_key_names && @unixtimestamp_key_names.include?(key)
              value = Time.at(value).strftime('%Y-%m-%d %H:%M:%S')
            end
          end
          values << value
        end
        values
      end
    end

    # Maps the :enum config value to its SQL keyword form.
    def transaction_isolation_level
      case @transaction_isolation_level
      when :read_uncommitted
        "READ UNCOMMITTED"
      when :read_committed
        "READ COMMITTED"
      when :repeatable_read
        "REPEATABLE READ"
      when :serializable
        "SERIALIZABLE"
      end
    end
  end
end
|
data/test/helper.rb
ADDED
@@ -0,0 +1,29 @@
|
|
1
|
+
# Test bootstrap for the plugin's test/unit suite: sets up Bundler,
# puts lib/ on the load path, silences fluentd's global logger unless
# VERBOSE is set, and loads both output plugins under test.
require 'rubygems'
require 'bundler'
begin
  Bundler.setup(:default, :development)
rescue Bundler::BundlerError => e
  # Missing gems are a setup problem, not a test failure: explain and exit.
  $stderr.puts e.message
  $stderr.puts "Run `bundle install` to install missing gems"
  exit e.status_code
end
require 'test/unit'

# Make lib/fluent/plugin/... and this test directory requirable.
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'fluent/test'
unless ENV.has_key?('VERBOSE')
  # Replace the global $log with an object that swallows every call,
  # keeping test output clean. Set VERBOSE to see plugin logging.
  nulllogger = Object.new
  nulllogger.instance_eval {|obj|
    def method_missing(method, *args)
      # pass
    end
  }
  $log = nulllogger
end

require 'fluent/plugin/out_mysql'
require 'fluent/plugin/out_mysql_bulk'

class Test::Unit::TestCase
end
|
@@ -0,0 +1,199 @@
|
|
1
|
+
require 'helper'
|
2
|
+
require 'mysql2-cs-bind'
|
3
|
+
|
4
|
+
# Unit tests for Fluent::MysqlOutput (the deprecated out_mysql plugin).
# Uses the legacy BufferedOutputTestDriver; the MySQL client is stubbed
# out in create_driver, so no database is needed.
class MysqlOutputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  # Baseline config: json format with a single-placeholder raw SQL.
  CONFIG = %[
    host db.local
    database testing
    username testuser
    sql INSERT INTO tbl SET jsondata=?
    format json
  ]

  # Builds a test driver whose plugin instance has `client` replaced by a
  # stub object (xquery returns [1], close returns true) so `write` never
  # touches a real server.
  def create_driver(conf = CONFIG, tag='test')
    d = Fluent::Test::BufferedOutputTestDriver.new(Fluent::MysqlOutput, tag).configure(conf)
    d.instance.instance_eval {
      def client
        obj = Object.new
        obj.instance_eval {
          def xquery(*args); [1]; end
          def close; true; end
        }
        obj
      end
    }
    d
  end

  # Valid sql/columns configs are accepted; key_names are stripped and the
  # generated INSERT matches the columns; placeholder/key mismatch raises.
  def test_configure
    d = create_driver %[
      host database.local
      database foo
      username bar
      sql INSERT INTO baz SET jsondata=?
      format json
    ]
    d = create_driver %[
      host database.local
      database foo
      username bar
      table baz
      columns jsondata
      format json
    ]
    d = create_driver %[
      host database.local
      database foo
      username bar
      password mogera
      key_names field1,field2,field3
      table baz
      columns col1, col2 ,col3
    ]
    assert_equal ['field1', 'field2', 'field3'], d.instance.key_names
    assert_equal 'INSERT INTO baz (col1,col2,col3) VALUES (?,?,?)', d.instance.sql
    d = create_driver %[
      host database.local
      database foo
      username bar
      password mogera
      key_names field1 ,field2, field3
      table baz
      columns col1, col2 ,col3
    ]
    assert_equal ['field1', 'field2', 'field3'], d.instance.key_names
    assert_equal 'INSERT INTO baz (col1,col2,col3) VALUES (?,?,?)', d.instance.sql

    # 3 key_names but 4 placeholders in sql must be rejected.
    assert_raise(Fluent::ConfigError) {
      d = create_driver %[
        host database.local
        database foo
        username bar
        password mogera
        key_names field1,field2,field3
        sql INSERT INTO baz (col1,col2,col3,col4) VALUES (?,?,?,?)
      ]
    }

  end

  # json format buffers [tag, time, record.to_json] per event.
  def test_format
    d = create_driver

    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
    d.emit({"a"=>1}, time)
    d.emit({"a"=>2}, time)

    #d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n]
    #d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n]
    d.expect_format ['test', time, {"a" => 1}.to_json].to_msgpack
    d.expect_format ['test', time, {"a" => 2}.to_json].to_msgpack

    d.run
  end

  # include_time_key/include_tag_key inject time and tag as leading values
  # (raw format, default time/tag key names).
  def test_time_and_tag_key
    d = create_driver %[
      host database.local
      database foo
      username bar
      password mogera
      include_time_key yes
      utc
      include_tag_key yes
      table baz
      key_names time,tag,field1,field2,field3,field4
      sql INSERT INTO baz (coltime,coltag,col1,col2,col3,col4) VALUES (?,?,?,?,?,?)
    ]
    assert_equal 'INSERT INTO baz (coltime,coltag,col1,col2,col3,col4) VALUES (?,?,?,?,?,?)', d.instance.sql

    time = Time.parse('2012-12-17 01:23:45 UTC').to_i
    record = {'field1'=>'value1','field2'=>'value2','field3'=>'value3','field4'=>'value4'}
    d.emit(record, time)
    d.expect_format ['test', time, ['2012-12-17T01:23:45Z','test','value1','value2','value3','value4']].to_msgpack
    d.run
  end

  # Custom time_format/time_key/tag_key names are honored; emitted JST time
  # is rendered in UTC per the `utc` option.
  def test_time_and_tag_key_complex
    d = create_driver %[
      host database.local
      database foo
      username bar
      password mogera
      include_time_key yes
      utc
      time_format %Y%m%d-%H%M%S
      time_key timekey
      include_tag_key yes
      tag_key tagkey
      table baz
      key_names timekey,tagkey,field1,field2,field3,field4
      sql INSERT INTO baz (coltime,coltag,col1,col2,col3,col4) VALUES (?,?,?,?,?,?)
    ]
    assert_equal 'INSERT INTO baz (coltime,coltag,col1,col2,col3,col4) VALUES (?,?,?,?,?,?)', d.instance.sql

    time = Time.parse('2012-12-17 09:23:45 +0900').to_i # JST(+0900)
    record = {'field1'=>'value1','field2'=>'value2','field3'=>'value3','field4'=>'value4'}
    d.emit(record, time)
    d.expect_format ['test', time, ['20121217-002345','test','value1','value2','value3','value4']].to_msgpack
    d.run
  end

  # json format with injected time/tag keys: the keys are merged into the
  # record before serialization.
  def test_time_and_tag_key_json
    d = create_driver %[
      host database.local
      database foo
      username bar
      password mogera
      include_time_key yes
      utc
      time_format %Y%m%d-%H%M%S
      time_key timekey
      include_tag_key yes
      tag_key tagkey
      table accesslog
      columns jsondata
      format json
    ]
    assert_equal 'INSERT INTO accesslog (jsondata) VALUES (?)', d.instance.sql

    time = Time.parse('2012-12-17 09:23:45 +0900').to_i # JST(+0900)
    record = {'field1'=>'value1'}
    d.emit(record, time)
    # Ruby 1.9.3 Hash saves its key order, so this code is OK.
    d.expect_format ['test', time, record.merge({'timekey'=>'20121217-002345','tagkey'=>'test'}).to_json].to_msgpack
    d.run
  end

  # jsonpath format: key_names are JsonPath expressions resolved against
  # the record (nested hash access, array indexing).
  def test_jsonpath_format
    d = create_driver %[
      host database.local
      database foo
      username bar
      password mogera
      include_time_key yes
      utc
      include_tag_key yes
      table baz
      format jsonpath
      key_names time, tag, id, data.name, tags[0]
      sql INSERT INTO baz (coltime,coltag,id,name,tag1) VALUES (?,?,?,?,?)
    ]
    assert_equal 'INSERT INTO baz (coltime,coltag,id,name,tag1) VALUES (?,?,?,?,?)', d.instance.sql

    time = Time.parse('2012-12-17 01:23:45 UTC').to_i
    record = { 'id' => 15, 'data'=> {'name' => 'jsonpath' }, 'tags' => ['unit', 'simple'] }
    d.emit(record, time)
    d.expect_format ['test', time, ['2012-12-17T01:23:45Z','test',15,'jsonpath','unit']].to_msgpack
    d.run
  end

  # TODO: write-path test never implemented upstream (needs a live/fake DB).
  def test_write
    # hmm....
  end
end
|