fluent-plugin-mongokpi 0.0.0
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +17 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +14 -0
- data/README.md +30 -0
- data/Rakefile +11 -0
- data/fluent-plugin-mongokpi.gemspec +26 -0
- data/lib/fluent/plugin/out_mongokpi.rb +358 -0
- data/test/fluent/plugin/out_mongokpi_test.rb +90 -0
- data/test/helper.rb +29 -0
- metadata +139 -0
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
Copyright (c) 2013- Hatayama Hideharu
|
2
|
+
|
3
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
4
|
+
you may not use this file except in compliance with the License.
|
5
|
+
You may obtain a copy of the License at
|
6
|
+
|
7
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
8
|
+
|
9
|
+
Unless required by applicable law or agreed to in writing, software
|
10
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
11
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12
|
+
See the License for the specific language governing permissions and
|
13
|
+
limitations under the License.
|
14
|
+
|
data/README.md
ADDED
@@ -0,0 +1,30 @@
|
|
1
|
+
# Fluent::Plugin::Mongokpi
|
2
|
+
|
3
|
+
Fluent BufferedOutput plugin: counting chunk, inserting counts to make kpi count on MongoDB
|
4
|
+
|
5
|
+
## Installation
|
6
|
+
|
7
|
+
Add this line to your application's Gemfile:
|
8
|
+
|
9
|
+
gem 'fluent-plugin-mongokpi'
|
10
|
+
|
11
|
+
And then execute:
|
12
|
+
|
13
|
+
$ bundle
|
14
|
+
|
15
|
+
Or install it yourself as:
|
16
|
+
|
17
|
+
$ gem install fluent-plugin-mongokpi
|
18
|
+
|
19
|
+
## Usage
|
20
|
+
|
21
|
+
m(_ _)m
|
22
|
+
|
23
|
+
## Contributing
|
24
|
+
|
25
|
+
1. Fork it
|
26
|
+
2. Create your feature branch (`git checkout -b my-new-feature`)
|
27
|
+
3. Commit your changes (`git commit -am 'Add some feature'`)
|
28
|
+
4. Push to the branch (`git push origin my-new-feature`)
|
29
|
+
5. Create new Pull Request
|
30
|
+
|
data/Rakefile
ADDED
@@ -0,0 +1,26 @@
|
|
1
|
+
# coding: utf-8
# Gemspec for fluent-plugin-mongokpi: a fluentd BufferedOutput plugin that
# counts chunks and inserts KPI counts into MongoDB.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

Gem::Specification.new do |spec|
  spec.name = "fluent-plugin-mongokpi"
  spec.version = "0.0.0"
  spec.authors = ["Hatayama Hideharu"]
  spec.email = ["h.hiddy@gmail.com"]
  spec.description = %q{Fluent BufferedOutput plugin: counting chunk, inserting counts to make kpi count on MongoDB}
  spec.summary = spec.description
  spec.homepage = "https://bitbucket.org/hidepiy/fluent-plugin-mongokpi"
  # NOTE(review): "APLv2" is not a standard SPDX identifier; consider
  # "Apache-2.0" -- confirm before changing published metadata.
  spec.license = "APLv2"

  # Package every git-tracked file; tests live under test/.
  spec.files = `git ls-files`.split($/)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_runtime_dependency "fluentd"
  # Pinned to the legacy 1.x mongo driver API (MongoClient / Collection#update).
  spec.add_dependency "mongo", "1.9.2"
  spec.add_dependency "bson_ext", "1.9.2"
end
|
26
|
+
|
@@ -0,0 +1,358 @@
|
|
1
|
+
module Fluent
  # BufferedOutput plugin 'mongokpi': aggregates buffered records into
  # per-minute KPI counter documents and upserts them into MongoDB.
  class MongoKpiOutput < Fluent::BufferedOutput
    # Register plugin first. 'mongokpi' is the name of this plugin
    # which is used in the configuration file.
    Plugin.register_output('mongokpi', self)

    # Mongo client settings
    config_param :address, :string, :default => 'localhost:27017' # 'host:port', or comma-separated replica-set seeds
    config_param :db, :string, :default => 'kpidb'
    config_param :collection, :string, :default => 'kpiColyyyymmdd' # 'yyyymmdd' is replaced by the record date
    # Mongo connection options (only passed through when non-nil)
    config_param :write_concern, :integer, :default => nil
    config_param :name, :string, :default => nil
    config_param :read, :string, :default => nil
    config_param :refresh_mode, :string, :default => nil
    config_param :refresh_interval, :integer, :default => nil
    # collection options (capped collection when capped_size > 0)
    config_param :capped_size, :integer, :default => 0
    config_param :capped_max, :integer, :default => 0
    # KPI count options
    config_param :time_key, :string, :default => nil    # record field holding the event time; fluentd time when nil
    config_param :time_format, :string, :default => nil # strptime format for time_key; DateTime.parse when nil
    config_param :count_key, :string, :default => 'none' # comma-separated record fields forming the counting key
    config_param :count_name, :string, :default => 'count'
    # access count options
    config_param :kpi_type, :string, :default => nil # use 'access' for activating access count.
    config_param :responseThreshold, :integer, :default => 1000000
    config_param :f_code, :string, :default => 'code'
    config_param :f_response_time, :string, :default => 'response_time'

    attr_reader :collections_opts, :connection_opts

    def initialize
      super
      require 'date'
      require 'mongo'
      require 'msgpack'
    end

    # This method is called before starting.
    # Builds the connection/collection option hashes from 'conf'.
    # If the configuration is invalid, Fluent::ConfigError is raised (via super).
    def configure(conf)
      super
      @connection_opts = {}
      @connection_opts[:w] = @write_concern unless @write_concern.nil?
      @connection_opts[:name] = @name unless @name.nil?
      @connection_opts[:read] = @read unless @read.nil?
      @connection_opts[:refresh_mode] = @refresh_mode unless @refresh_mode.nil?
      @connection_opts[:refresh_interval] = @refresh_interval unless @refresh_interval.nil?
      @collections_opts = {}
      if @capped_size > 0
        @collections_opts[:capped] = true
        @collections_opts[:size] = Config.size_value(conf['capped_size'])
        # BUGFIX: @capped_max is an Integer and 0 is truthy in Ruby, so the
        # original `if @capped_max` guard was always taken. Only set :max
        # when a positive capped_max was actually configured.
        @collections_opts[:max] = Config.size_value(conf['capped_max']) if @capped_max > 0
      else
        @collections_opts[:capped] = false
      end
    end

    # This method is called when starting: open the MongoDB connection.
    def start
      super
      @client = get_client(@address, @connection_opts)
    end

    # This method is called when shutting down: close the MongoDB connection.
    def shutdown
      @client.db.connection.close
      super
    end

    # This method is called when an event is reached.
    # Serialize the event into the buffer chunk as msgpack.
    def format(tag, time, record)
      [tag, time, record].to_msgpack
    end

    # This method is called every flush interval: count the chunk and
    # upsert the resulting counter documents into MongoDB.
    def write(chunk)
      doc_hash = get_insert_doc_hash(@kpi_type, chunk, @time_key, @time_format, @count_key, @count_name)
      insert(@kpi_type, @collection, @count_key, @count_name, doc_hash)
    end

    # Connect to a single mongod ('host:port') or, when the address contains
    # commas, to a replica set seeded with the listed hosts. Exits the process
    # on connection failure so fluentd can be restarted once mongod is up.
    def get_client(address, connection_opts)
      begin
        if address.include?(',')
          return Mongo::MongoReplicaSetClient.new(address.split(','), connection_opts)
        else
          host_port = address.split(':', 2)
          # BUGFIX: the original passed collections_opts (collection creation
          # options) instead of connection_opts, and the port as a String;
          # MongoClient.new expects the connection options and a numeric port.
          return Mongo::MongoClient.new(host_port[0], host_port[1].to_i, connection_opts)
        end
      rescue Mongo::ConnectionFailure => e
        $log.fatal "Failed to connect to 'mongod'. Please restart 'fluentd' after 'mongod' started: #{e}"
        exit!
      rescue Mongo::OperationFailure => e
        $log.fatal "Operation failed. Probably, 'mongod' needs an authentication: #{e}"
        exit!
      end
    end

    # Return the collection for the given date, caching it until the
    # resolved (date-substituted) collection name changes.
    def get_collection(collection_name, yyyymmdd)
      converted_collection_name = convert_collection_name(collection_name, yyyymmdd)
      if @current_collection.nil? || @current_collection.name != converted_collection_name
        $log.info "Start using collection: #{converted_collection_name}"
        @current_collection = get_collection_from_db(@client, @db, converted_collection_name, @collections_opts)
      end
      return @current_collection
    end

    # Replace the literal 'yyyymmdd' placeholder with the actual date string.
    def convert_collection_name(collection_name, yyyymmdd)
      return collection_name.sub('yyyymmdd', yyyymmdd)
    end

    def get_collection_from_db(client, db_name, collection_name, collections_opts)
      return client.db(db_name).collection(collection_name, @collections_opts)
    end

    # Aggregate a chunk into a Hash of counter documents keyed by
    # "<count_key_value><yyyymmddHHMM>", plus a per-minute 'total' entry.
    def get_insert_doc_hash(kpi_type, chunk, time_key, time_format, count_key, count_name)
      hash_counter = {}
      chunk.msgpack_each { |tag, time, record|
        $log.debug record
        # BUGFIX: the original multi-line ternary (continuation lines starting
        # with ':') does not parse in Ruby; rewritten as an if/elsif chain.
        tmp_time = if time_key.nil?
                     Time.at(time)
                   elsif time_format.nil?
                     DateTime.parse(record[time_key])
                   else
                     DateTime.strptime(record[time_key], time_format)
                   end
        # with count_key
        if 'none' != count_key
          count_key_value = ''
          count_key.split(',').each { |x| count_key_value += record[x].to_s }
          key_str = count_key_value + tmp_time.strftime('%Y%m%d%H%M')
          doc = hash_counter.key?(key_str) ? hash_counter[key_str] : get_doc(kpi_type, count_key, count_key_value, count_name, tmp_time)
          hash_counter[key_str] = count_up(kpi_type, doc, record, count_name, tmp_time)
        end
        # total
        total_key_str = 'total' + tmp_time.strftime('%Y%m%d%H%M')
        total = hash_counter.key?(total_key_str) ? hash_counter[total_key_str] : get_doc(kpi_type, count_key, 'total', count_name, tmp_time)
        hash_counter[total_key_str] = count_up(kpi_type, total, record, count_name, tmp_time)
      }
      return hash_counter
    end

    # Build a fresh (zeroed) counter document for one counting key / minute.
    def get_doc(kpi_type, count_key, count_key_value, count_name, time)
      doc = {}
      doc[count_key] = count_key_value
      doc['yyyymmdd'] = time.strftime('%Y%m%d')
      doc['hh'] = time.strftime('%H')
      doc['mm'] = time.strftime('%M')
      doc[count_name] = 0
      if 'access' == kpi_type
        doc['countOver'] = 0
        doc['count1xx'] = 0
        doc['count2xx'] = 0
        doc['count3xx'] = 0
        doc['count4xx'] = 0
        doc['count5xx'] = 0
        doc['responseTimeAve'] = 0
        doc['responseTimeMax'] = 0
        doc['responseTimeMin'] = 100000000.00 # sentinel so the first sample always becomes the minimum
        doc['responseTimeSum'] = 0
        doc['qpsAve'] = 0
        doc['qpsMax'] = 0
        doc['qpsMin'] = 100000000
        doc['okRatio'] = 0.00
        doc['counter'] = Array.new(60, 0) # per-second hit counter within the minute
      end
      return doc
    end

    # Apply one record to the counter document and return the document.
    def count_up(kpi_type, doc, record, count_name, time)
      doc[count_name] += 1
      if 'access' == kpi_type
        response_time = record.key?(@f_response_time) ? record[@f_response_time].to_i : 0
        if response_time > @responseThreshold
          doc['countOver'] += 1
        end
        case record[@f_code].to_i / 100
        when 1 then doc['count1xx'] += 1
        when 2 then doc['count2xx'] += 1
        when 3 then doc['count3xx'] += 1
        when 4 then doc['count4xx'] += 1
        when 5 then doc['count5xx'] += 1
        end
        if doc['responseTimeMax'] < response_time
          doc['responseTimeMax'] = response_time
        end
        if doc['responseTimeMin'] > response_time
          doc['responseTimeMin'] = response_time
        end
        doc['responseTimeSum'] += response_time
        doc['counter'][time.strftime('%S').to_i] += 1
      end
      return doc
    end

    # Upsert every counter document. For kpi_type 'access', also $inc the
    # per-second counters and recompute the derived statistics from the
    # cumulative document read back from MongoDB.
    def insert(kpi_type, collection_name, count_key, count_name, doc_hash)
      begin
        doc_hash.each { |key, doc|
          $log.debug doc
          collection = get_collection(collection_name, doc['yyyymmdd'])
          # MongoDB 2.5 or less has no $min/$max update operators:
          # http://stackoverflow.com/questions/8508663/calculate-max-value-in-an-atomic-findandmodify-operation
          # TODO improve for Mongo 2.6 ($min, $max field update operators)
          # https://jira.mongodb.org/browse/SERVER-1534
          # https://jira.mongodb.org/browse/DOCS-2012
          if 'access' == kpi_type
            selector = {'_id' => key, count_key => doc[count_key],
              'yyyymmdd' => doc['yyyymmdd'], 'hh' => doc['hh'], 'mm' => doc['mm']}
            # Seed the per-second counter array only on first insert.
            collection.update(selector,
              {'$setOnInsert' => {'counter' => Array.new(60, 0)}},
              {:upsert => true})
            inc = {
              count_name => doc[count_name],
              'countOver' => doc['countOver'],
              'count1xx' => doc['count1xx'],
              'count2xx' => doc['count2xx'],
              'count3xx' => doc['count3xx'],
              'count4xx' => doc['count4xx'],
              'count5xx' => doc['count5xx'],
              'responseTimeSum' => doc['responseTimeSum']
            }
            # Same 'counter.0' .. 'counter.59' increments the original spelled
            # out by hand across 60 lines.
            doc['counter'].each_with_index { |v, i| inc["counter.#{i}"] = v }
            collection.update(selector, {'$inc' => inc}, {:upsert => true})
            updated_doc_array = collection.find({'_id' => key}).to_a
            $log.debug updated_doc_array
            # BUGFIX: 'continue' is not a Ruby keyword (use 'next'); also
            # Cursor#to_a never returns nil, so guard against an empty result.
            next if updated_doc_array.empty?
            updated_doc = updated_doc_array[0]
            # BUGFIX: average the cumulative sum over the cumulative count
            # stored in MongoDB; the original divided by only this chunk's
            # count, overstating the average after the first flush.
            response_time_ave = updated_doc['responseTimeSum'] / updated_doc[count_name]
            if !updated_doc['responseTimeMax'].nil? && updated_doc['responseTimeMax'] > doc['responseTimeMax']
              response_time_max = updated_doc['responseTimeMax']
            else
              response_time_max = doc['responseTimeMax']
            end
            if !updated_doc['responseTimeMin'].nil? && updated_doc['responseTimeMin'] < doc['responseTimeMin']
              response_time_min = updated_doc['responseTimeMin']
            else
              response_time_min = doc['responseTimeMin']
            end
            qps_ave = (updated_doc['counter'].inject(0.0){|r,i| r+=i } / updated_doc['counter'].size).round
            qps_max = updated_doc['counter'].max
            qps_min = updated_doc['counter'].min
            ok_ratio = ((updated_doc[count_name] - updated_doc['countOver']).to_f / updated_doc[count_name]).round(4)
            collection.update(selector,
              { '$set' => {
                'responseTimeAve' => response_time_ave,
                'responseTimeMax' => response_time_max,
                'responseTimeMin' => response_time_min,
                'qpsAve' => qps_ave,
                'qpsMax' => qps_max,
                'qpsMin' => qps_min,
                'okRatio' => ok_ratio
              }}
            )
          else
            collection.update(
              {'_id' => key, count_key => doc[count_key],
                'yyyymmdd' => doc['yyyymmdd'], 'hh' => doc['hh'], 'mm' => doc['mm']},
              {'$inc' => {count_name => doc[count_name]}},
              {:upsert => true}
            )
          end
        }
      rescue Mongo::OperationFailure => e
        raise e
      end
    end
  end
end
|
358
|
+
|
@@ -0,0 +1,90 @@
|
|
1
|
+
require 'helper'

# Unit tests for Fluent::MongoKpiOutput configuration and formatting.
# NOTE(review): the class name looks copied from a data_counter plugin test;
# kept unchanged so external references (rake task filters etc.) still work.
class DataCounterOutputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  # Every parameter has a default, so an empty configuration is valid.
  DEFAULT_CONFIG = %[
  ]

  DEFAULT_TAG = 'default_tag'

  def create_driver(conf = DEFAULT_CONFIG, tag = DEFAULT_TAG)
    Fluent::Test::BufferedOutputTestDriver.new(Fluent::MongoKpiOutput, tag).configure(conf)
  end

  def test_configure_default
    d = create_driver
    assert_equal 'localhost:27017', d.instance.address
    assert_equal 'kpidb', d.instance.db
    assert_equal nil, d.instance.connection_opts[:w]
    # BUGFIX: the original asserted connection_opts[:name] twice (copy-paste
    # duplicate); the redundant assertion has been removed.
    assert_equal nil, d.instance.connection_opts[:name]
    assert_equal nil, d.instance.connection_opts[:read]
    assert_equal nil, d.instance.connection_opts[:refresh_mode]
    assert_equal nil, d.instance.connection_opts[:refresh_interval]
    assert_equal false, d.instance.collections_opts[:capped]
    assert_equal nil, d.instance.collections_opts[:size]
    assert_equal nil, d.instance.collections_opts[:max]
  end

  def test_configure
    d = create_driver(DEFAULT_CONFIG + %[
      address 49.212.133.23:27019
      db testdb
      collection testColyyyymmdd
      write_concern 1
      name testreplica
      read secondary
      refresh_mode sync
      refresh_interval 1000
      capped_size 100
      capped_max 200
    ])
    assert_equal '49.212.133.23:27019', d.instance.address
    assert_equal 'testdb', d.instance.db
    assert_equal 'testColyyyymmdd', d.instance.collection
    assert_equal 1, d.instance.connection_opts[:w]
    assert_equal 'testreplica', d.instance.connection_opts[:name]
    assert_equal 'secondary', d.instance.connection_opts[:read]
    assert_equal 'sync', d.instance.connection_opts[:refresh_mode]
    assert_equal 1000, d.instance.connection_opts[:refresh_interval]
    assert_equal true, d.instance.collections_opts[:capped]
    assert_equal 100, d.instance.collections_opts[:size]
    assert_equal 200, d.instance.collections_opts[:max]
  end

  def test_format
    d = create_driver
    time = Time.parse("2112-09-03 01:23:45 UTC").to_i
    d.emit({"gerogero" => "Let's get Rocking!", "site" => "yapoo"}, time)
    d.emit({"gerogero" => "Let's get Rocking!", "site" => "geegero"}, time)
    # Expected bytes are the msgpack encoding of [tag, time, record] pairs.
    d.expect_format "\x93\xABdefault_tag\xCF\x00\x00\x00\x01\f[\xC8\xA1\x82\xA8gerogero\xB2Let's get Rocking!\xA4site\xA5yapoo\x93\xABdefault_tag\xCF\x00\x00\x00\x01\f[\xC8\xA1\x82\xA8gerogero\xB2Let's get Rocking!\xA4site\xA7geegero"
    # d.run
  end

  def test_write
    d = create_driver
    # TODO: write() requires a reachable mongod; exercise it with a stubbed
    # collection or an integration environment.
  end
end
|
89
|
+
|
90
|
+
|
data/test/helper.rb
ADDED
@@ -0,0 +1,29 @@
|
|
1
|
+
# Common test bootstrap: sets up Bundler, test/unit, load paths and loads
# the plugin under test.
require 'rubygems'
require 'bundler'
begin
  Bundler.setup(:default, :development)
rescue Bundler::BundlerError => e
  $stderr.puts e.message
  $stderr.puts "Run `bundle install` to install missing gems"
  exit e.status_code
end
require 'test/unit'

# Make lib/ and this directory requireable from the tests.
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'fluent/test'
unless ENV.has_key?('VERBOSE')
  # Silence plugin logging unless VERBOSE is set: replace $log with an
  # object that swallows every method call via method_missing.
  # NOTE(review): the |obj| block parameter to instance_eval is unused.
  nulllogger = Object.new
  nulllogger.instance_eval {|obj|
    def method_missing(method, *args)
      # pass
    end
  }
  $log = nulllogger
end

require 'fluent/plugin/out_mongokpi'

class Test::Unit::TestCase
end
|
29
|
+
|
metadata
ADDED
@@ -0,0 +1,139 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: fluent-plugin-mongokpi
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.0
|
5
|
+
prerelease:
|
6
|
+
platform: ruby
|
7
|
+
authors:
|
8
|
+
- Hatayama Hideharu
|
9
|
+
autorequire:
|
10
|
+
bindir: bin
|
11
|
+
cert_chain: []
|
12
|
+
date: 2013-11-27 00:00:00.000000000 Z
|
13
|
+
dependencies:
|
14
|
+
- !ruby/object:Gem::Dependency
|
15
|
+
name: bundler
|
16
|
+
requirement: !ruby/object:Gem::Requirement
|
17
|
+
none: false
|
18
|
+
requirements:
|
19
|
+
- - ~>
|
20
|
+
- !ruby/object:Gem::Version
|
21
|
+
version: '1.3'
|
22
|
+
type: :development
|
23
|
+
prerelease: false
|
24
|
+
version_requirements: !ruby/object:Gem::Requirement
|
25
|
+
none: false
|
26
|
+
requirements:
|
27
|
+
- - ~>
|
28
|
+
- !ruby/object:Gem::Version
|
29
|
+
version: '1.3'
|
30
|
+
- !ruby/object:Gem::Dependency
|
31
|
+
name: rake
|
32
|
+
requirement: !ruby/object:Gem::Requirement
|
33
|
+
none: false
|
34
|
+
requirements:
|
35
|
+
- - ! '>='
|
36
|
+
- !ruby/object:Gem::Version
|
37
|
+
version: '0'
|
38
|
+
type: :development
|
39
|
+
prerelease: false
|
40
|
+
version_requirements: !ruby/object:Gem::Requirement
|
41
|
+
none: false
|
42
|
+
requirements:
|
43
|
+
- - ! '>='
|
44
|
+
- !ruby/object:Gem::Version
|
45
|
+
version: '0'
|
46
|
+
- !ruby/object:Gem::Dependency
|
47
|
+
name: fluentd
|
48
|
+
requirement: !ruby/object:Gem::Requirement
|
49
|
+
none: false
|
50
|
+
requirements:
|
51
|
+
- - ! '>='
|
52
|
+
- !ruby/object:Gem::Version
|
53
|
+
version: '0'
|
54
|
+
type: :runtime
|
55
|
+
prerelease: false
|
56
|
+
version_requirements: !ruby/object:Gem::Requirement
|
57
|
+
none: false
|
58
|
+
requirements:
|
59
|
+
- - ! '>='
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
- !ruby/object:Gem::Dependency
|
63
|
+
name: mongo
|
64
|
+
requirement: !ruby/object:Gem::Requirement
|
65
|
+
none: false
|
66
|
+
requirements:
|
67
|
+
- - '='
|
68
|
+
- !ruby/object:Gem::Version
|
69
|
+
version: 1.9.2
|
70
|
+
type: :runtime
|
71
|
+
prerelease: false
|
72
|
+
version_requirements: !ruby/object:Gem::Requirement
|
73
|
+
none: false
|
74
|
+
requirements:
|
75
|
+
- - '='
|
76
|
+
- !ruby/object:Gem::Version
|
77
|
+
version: 1.9.2
|
78
|
+
- !ruby/object:Gem::Dependency
|
79
|
+
name: bson_ext
|
80
|
+
requirement: !ruby/object:Gem::Requirement
|
81
|
+
none: false
|
82
|
+
requirements:
|
83
|
+
- - '='
|
84
|
+
- !ruby/object:Gem::Version
|
85
|
+
version: 1.9.2
|
86
|
+
type: :runtime
|
87
|
+
prerelease: false
|
88
|
+
version_requirements: !ruby/object:Gem::Requirement
|
89
|
+
none: false
|
90
|
+
requirements:
|
91
|
+
- - '='
|
92
|
+
- !ruby/object:Gem::Version
|
93
|
+
version: 1.9.2
|
94
|
+
description: ! 'Fluent BufferedOutput plugin: counting chunk, inserting counts to
|
95
|
+
make kpi count on MongoDB'
|
96
|
+
email:
|
97
|
+
- h.hiddy@gmail.com
|
98
|
+
executables: []
|
99
|
+
extensions: []
|
100
|
+
extra_rdoc_files: []
|
101
|
+
files:
|
102
|
+
- .gitignore
|
103
|
+
- Gemfile
|
104
|
+
- LICENSE.txt
|
105
|
+
- README.md
|
106
|
+
- Rakefile
|
107
|
+
- fluent-plugin-mongokpi.gemspec
|
108
|
+
- lib/fluent/plugin/out_mongokpi.rb
|
109
|
+
- test/fluent/plugin/out_mongokpi_test.rb
|
110
|
+
- test/helper.rb
|
111
|
+
homepage: https://bitbucket.org/hidepiy/fluent-plugin-mongokpi
|
112
|
+
licenses:
|
113
|
+
- APLv2
|
114
|
+
post_install_message:
|
115
|
+
rdoc_options: []
|
116
|
+
require_paths:
|
117
|
+
- lib
|
118
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
119
|
+
none: false
|
120
|
+
requirements:
|
121
|
+
- - ! '>='
|
122
|
+
- !ruby/object:Gem::Version
|
123
|
+
version: '0'
|
124
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
125
|
+
none: false
|
126
|
+
requirements:
|
127
|
+
- - ! '>='
|
128
|
+
- !ruby/object:Gem::Version
|
129
|
+
version: '0'
|
130
|
+
requirements: []
|
131
|
+
rubyforge_project:
|
132
|
+
rubygems_version: 1.8.23
|
133
|
+
signing_key:
|
134
|
+
specification_version: 3
|
135
|
+
summary: ! 'Fluent BufferedOutput plugin: counting chunk, inserting counts to make
|
136
|
+
kpi count on MongoDB'
|
137
|
+
test_files:
|
138
|
+
- test/fluent/plugin/out_mongokpi_test.rb
|
139
|
+
- test/helper.rb
|