fluent-plugin-mongo 0.5.3 → 0.6.0
- data/README.rdoc +49 -23
- data/VERSION +1 -1
- data/fluent-plugin-mongo.gemspec +2 -2
- data/lib/fluent/plugin/in_mongo_tail.rb +4 -2
- data/lib/fluent/plugin/mongo_ext.rb +55 -0
- data/lib/fluent/plugin/out_mongo.rb +63 -11
- data/lib/fluent/plugin/out_mongo_replset.rb +73 -0
- data/lib/fluent/plugin/out_mongo_tag_collection.rb +3 -8
- data/test/plugin/out_mongo.rb +4 -0
- data/test/plugin/out_mongo_tag_collection.rb +9 -9
- metadata +20 -18
data/README.rdoc
CHANGED
@@ -7,11 +7,11 @@
 Store fluent-event as MongoDB Document to MongoDB database.
 
 MongoOutput set 'time' field to a document by default.
-You set
+You set _false_ to 'include_time_key' parameter if you disable this behaivor.
 
-
+==== tag mapped mode
 
-Tag
+Tag mapped to MongoDB collection automatically.
 
 === MongoBackupOutput
 
@@ -35,41 +35,67 @@ Tail capped collection to input data.
     port 10000
 
     # You can use 'capped' if you want to use capped collection
-    capped
+    capped
     capped_size 100m
 
     # Other buffer configurations here
   </match>
 
-====
+==== Tag mapped mode
 
-
-Because MongoDB and Ruby-Driver checks the total object size at each insertion.
-If total object size gets over the size limitation,
-MongoDB returns error or Ruby-Driver raises an Exception.
-
-So, MongoDB's output plugins reset :buffer_chunk_limit if configurated value is larger than above limitation.
-- Before v1.8, max of :buffer_chunk_limit is 2MB
-- After v1.8, max of :buffer_chunk_limit is 10MB
-
-=== MongoOutputTagCollection
-
-Use mongo_tag_collection type in match.
+Use 'tag_mapped' parameter.
 
 If tag name is "foo.bar", auto create collection "foo.bar" and insert data.
 
   <match forward.*>
-    type
+    type mongo
     database fluent
 
-    #
+    # If You use 'tag_mapped', then tag mapped mode enabled.
+    tag_mapped
+
+    # If tag is "forward.foo.bar", then prefix "forward." is removed.
+    # Collection name to insert is "foo.bar".
+    remove_tag_prefix forward.
+
+    # This configuration is used if tag not found. Default is 'untagged'.
     collection misc
 
-    #
-    # Collection Mapping name is "foo.bar".
-    remove_prefix_collection forward.
+    # Other configurations here
   </match>
 
+==== NOTE
+
+===== Broken data as a BSON
+
+Fluentd event sometimes has an invalid record as a BSON.
+In such case, mongo plugin marshals an invalid record using Marshal.dump
+and re-inserts its to same collection.
+
+Example:
+
+  {"key1": "invalid value", "key2": "valid value", "time": ISODate("2012-01-15T21:09:53Z") }
+
+to
+
+  {"__broken_data": "Marshal.dump result", "time": ISODate("2012-01-15T21:09:53Z") }
+
+mongo-ruby-driver cannot detect invalid attribute,
+so mongo plugin marshals all attributes excluding Fluentd keys(tag_key and time_key).
+
+If you ignores an invalid record, please set 'ignore_invalid_record true' in match.
+
+===== Buffer size limitation
+
+MongoDB's output plugins have the limitation of buffer size.
+Because MongoDB and Ruby-Driver checks the total object size at each insertion.
+If total object size gets over the size limitation,
+MongoDB returns error or Ruby-Driver raises an Exception.
+
+So, MongoDB's output plugins reset :buffer_chunk_limit if configurated value is larger than above limitation.
+- Before v1.8, max of :buffer_chunk_limit is 2MB
+- After v1.8, max of :buffer_chunk_limit is 10MB
+
 === MongoBackupOutput
 
 Use mongo_backup type in match. mongo_backup alwalys use capped collection.
@@ -107,7 +133,7 @@ Use mongo_tail type in source.
 
 === More configuration
 
--
+- Authentication
 - Select insert or update
 - etc
 
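For reference, the '__broken_data' documents described above can be unpacked again with Marshal.load. A minimal sketch using the mongo-ruby-driver 1.x API shipped with this release; the host, database, collection, and field values are placeholders, not plugin defaults:

  require 'mongo'

  db = Mongo::Connection.new('localhost', 27017).db('fluent')

  # Find records that failed BSON serialization and were re-inserted
  # by the plugin with their original attributes marshaled.
  db.collection('foo.bar').find('__broken_data' => {'$exists' => true}).each do |doc|
    original = Marshal.load(doc['__broken_data'])
    puts "#{doc['time']}: #{original.inspect}"
  end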
data/VERSION
CHANGED
@@ -1 +1 @@
-0.5.3
+0.6.0
data/fluent-plugin-mongo.gemspec
CHANGED
@@ -16,8 +16,8 @@ Gem::Specification.new do |gem|
   gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
   gem.require_paths = ['lib']
 
-  gem.add_dependency "fluentd", "
-  gem.add_dependency "mongo", "
+  gem.add_dependency "fluentd", ">= 0.10.7"
+  gem.add_dependency "mongo", "= 1.5.2"
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "simplecov", ">= 0.5.4"
   gem.add_development_dependency "rr", ">= 1.0.0"
data/lib/fluent/plugin/in_mongo_tail.rb
CHANGED
@@ -31,6 +31,8 @@ class MongoTailInput < Input
     end
 
     @last_id = @id_store_file ? get_last_id : nil
+
+    $log.debug "Setup mongo_tail configuration: mode = #{@id_store_file ? 'persistent' : 'non-persistent'}"
   end
 
   def start
@@ -61,9 +63,9 @@ class MongoTailInput < Input
 
   def get_capped_collection
     db = Mongo::Connection.new(@host, @port).db(@database)
-    raise ConfigError, "'#{@database}.#{@collection}' not found:
+    raise ConfigError, "'#{@database}.#{@collection}' not found: node = #{@host}:#{@port}" unless db.collection_names.include?(@collection)
     collection = db.collection(@collection)
-    raise ConfigError, "'#{@database}.#{@collection}' is not capped:
+    raise ConfigError, "'#{@database}.#{@collection}' is not capped: node = #{@host}:#{@port}" unless collection.capped?
     collection
   end
 
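get_capped_collection above refuses to start unless the configured collection already exists and is capped. A minimal preparation sketch with the mongo-ruby-driver 1.x API; the 'fluent' database and 'events' collection names are only examples:

  require 'mongo'

  db = Mongo::Connection.new('localhost', 27017).db('fluent')

  # mongo_tail can only follow a capped collection, so create one up front.
  unless db.collection_names.include?('events')
    db.create_collection('events', :capped => true, :size => 100 * 1024 * 1024)
  end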
data/lib/fluent/plugin/mongo_ext.rb
ADDED
@@ -0,0 +1,55 @@
+require 'mongo'
+
+module Mongo
+  class Collection
+    # Temporary fix.
+    # See pull request 82: https://github.com/mongodb/mongo-ruby-driver/pull/82
+    def insert_documents(documents, collection_name=@name, check_keys=true, safe=false, flags={})
+      if flags[:continue_on_error]
+        message = BSON::ByteBuffer.new
+        message.put_int(1)
+      else
+        message = BSON::ByteBuffer.new("\0\0\0\0")
+      end
+
+      collect_on_error = !!flags[:collect_on_error]
+      error_docs = [] if collect_on_error
+
+      BSON::BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{collection_name}")
+      documents =
+        if collect_on_error
+          documents.select do |doc|
+            begin
+              message.put_binary(BSON::BSON_CODER.serialize(doc, check_keys, true, @connection.max_bson_size).to_s)
+              true
+            rescue StandardError => e # StandardError will be replaced with BSONError
+              doc.delete(:_id)
+              error_docs << doc
+              false
+            end
+          end
+        else
+          documents.each do |doc|
+            message.put_binary(BSON::BSON_CODER.serialize(doc, check_keys, true, @connection.max_bson_size).to_s)
+          end
+        end
+      raise InvalidOperation, "Exceded maximum insert size of 16,000,000 bytes" if message.size > 16_000_000
+
+      instrument(:insert, :database => @db.name, :collection => collection_name, :documents => documents) do
+        if safe
+          @connection.send_message_with_safe_check(Mongo::Constants::OP_INSERT, message, @db.name, nil, safe)
+        else
+          @connection.send_message(Mongo::Constants::OP_INSERT, message)
+        end
+      end
+
+      doc_ids = documents.collect { |o| o[:_id] || o['_id'] }
+      if collect_on_error
+        return doc_ids, error_docs
+      else
+        doc_ids
+      end
+    end
+  end
+end
+
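The insert_documents override above is what lets out_mongo.rb call Collection#insert with :collect_on_error and get back the documents that failed BSON serialization instead of an exception. A rough usage sketch mirroring the plugin's own call, assuming the gem is on the load path; connection details and documents are placeholders:

  require 'mongo'
  require 'fluent/plugin/mongo_ext'

  collection = Mongo::Connection.new('localhost', 27017).db('fluent').collection('test')

  docs = [{'msg' => 'ok'}, {'msg' => "\xFF not valid UTF-8"}]

  # With :collect_on_error the whole batch is not aborted: serializable
  # documents are inserted, the rest come back as error_docs.
  ids, error_docs = collection.insert(docs, :collect_on_error => true)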
data/lib/fluent/plugin/out_mongo.rb
CHANGED
@@ -11,15 +11,21 @@ class MongoOutput < BufferedOutput
   config_set_default :include_time_key, true
 
   config_param :database, :string
-  config_param :collection, :string
+  config_param :collection, :string, :default => 'untagged'
   config_param :host, :string, :default => 'localhost'
   config_param :port, :integer, :default => 27017
+  config_param :ignore_invalid_record, :bool, :default => false
+
+  # tag mapping mode
+  config_param :tag_mapped, :bool, :default => false
+  config_param :remove_tag_prefix, :string, :default => nil
 
   attr_reader :argument
 
   def initialize
     super
     require 'mongo'
+    require 'fluent/plugin/mongo_ext'
     require 'msgpack'
 
     @clients = {}
@@ -29,6 +35,13 @@ class MongoOutput < BufferedOutput
   def configure(conf)
     super
 
+    @tag_mapped = true if conf.has_key?('tag_mapped')
+    raise ConfigError, "normal mode requires collection parameter" if !@tag_mapped and !conf.has_key?('collection')
+
+    if remove_tag_prefix = conf['remove_tag_prefix']
+      @remove_tag_prefix = Regexp.new('^' + Regexp.escape(remove_tag_prefix))
+    end
+
     # capped configuration
     if conf.has_key?('capped')
       raise ConfigError, "'capped_size' parameter is required on <store> of Mongo output" unless conf.has_key?('capped_size')
@@ -47,6 +60,8 @@ class MongoOutput < BufferedOutput
     def @timef.format_nocache(time)
       time
     end
+
+    $log.debug "Setup mongo configuration: mode = #{@tag_mapped ? 'tag mapped' : 'normal'}"
   end
 
   def start
@@ -63,22 +78,45 @@ class MongoOutput < BufferedOutput
     [time, record].to_msgpack
   end
 
-  def
-
+  def emit(tag, es, chain)
+    # TODO: Should replacement using eval in configure?
+    if @tag_mapped
+      super(tag, es, chain, tag)
+    else
+      super(tag, es, chain)
+    end
   end
 
-  def
-
-
-
-    formatted = @collection if formatted.size == 0 # set default for nil tag
-    formatted
+  def write(chunk)
+    # TODO: See emit comment
+    collection_name = @tag_mapped ? chunk.key : @collection
+    operate(collection_name, collect_records(chunk))
   end
 
   private
 
+  INSERT_ARGUMENT = {:collect_on_error => true}
+  BROKEN_DATA_KEY = '__broken_data'
+
   def operate(collection_name, records)
-    get_or_create_collection(collection_name)
+    collection = get_or_create_collection(collection_name)
+    record_ids, error_records = collection.insert(records, INSERT_ARGUMENT)
+    if error_records
+      if @ignore_invalid_record
+        $log.warn "Ignore #{error_records.size} documents"
+      else
+        # Should create another collection like name_broken?
+        converted_records = error_records.map { |record|
+          new_record = {}
+          new_record[@tag_key] = record.delete(@tag_key) if @include_tag_key
+          new_record[@time_key] = record.delete(@time_key)
+          new_record[BROKEN_DATA_KEY] = Marshal.dump(record) # Should use BSON::ByteBuffer
+          new_record
+        }
+        collection.insert(converted_records)
+      end
+    end
+    records
   end
 
   def collect_records(chunk)
@@ -90,11 +128,21 @@ class MongoOutput < BufferedOutput
     records
   end
 
+  FORMAT_COLLECTION_NAME_RE = /(^\.+)|(\.+$)/
+
+  def format_collection_name(collection_name)
+    formatted = collection_name
+    formatted = formatted.gsub(@remove_tag_prefix, '') if @remove_tag_prefix
+    formatted = formatted.gsub(FORMAT_COLLECTION_NAME_RE, '')
+    formatted = @collection if formatted.size == 0 # set default for nil tag
+    formatted
+  end
+
   def get_or_create_collection(collection_name)
     collection_name = format_collection_name(collection_name)
     return @clients[collection_name] if @clients[collection_name]
 
-    @db ||=
+    @db ||= get_connection
     if @db.collection_names.include?(collection_name)
       collection = @db.collection(collection_name)
       unless @argument[:capped] == collection.capped? # TODO: Verify capped configuration
@@ -108,6 +156,10 @@ class MongoOutput < BufferedOutput
     @clients[collection_name] = collection
   end
 
+  def get_connection
+    Mongo::Connection.new(@host, @port).db(@database)
+  end
+
   # Following limits are heuristic. BSON is sometimes bigger than MessagePack and JSON.
   LIMIT_BEFORE_v1_8 = 2 * 1024 * 1024 # 2MB = 4MB / 2
   LIMIT_AFTER_v1_8 = 10 * 1024 * 1024 # 10MB = 16MB / 2 + alpha
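For reference, the tag-to-collection mapping added above behaves roughly like this standalone Ruby sketch; the 'forward.' prefix and the tags are examples, and only 'untagged' is an actual plugin default:

  remove_tag_prefix = /^forward\./

  format = lambda do |tag|
    name = tag.gsub(remove_tag_prefix, '')
    name = name.gsub(/(^\.+)|(\.+$)/, '')   # strip leading/trailing dots
    name.empty? ? 'untagged' : name         # fall back to the 'collection' default
  end

  format.call('forward.foo.bar')  # => "foo.bar"
  format.call('forward.')         # => "untagged"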
data/lib/fluent/plugin/out_mongo_replset.rb
ADDED
@@ -0,0 +1,73 @@
+require 'fluent/plugin/out_mongo'
+
+module Fluent
+
+
+class MongoOutputReplset < MongoOutput
+  Fluent::Plugin.register_output('mongo_replset', self)
+
+  config_param :nodes, :string
+  config_param :name, :string, :default => nil
+  config_param :read, :string, :default => nil
+  config_param :refresh_mode, :string, :default => nil
+  config_param :refresh_interval, :integer, :default => nil
+  config_param :num_retries, :integer, :default => 60
+
+  def configure(conf)
+    super
+
+    @nodes = parse_nodes(conf['nodes'])
+    @rs_argument = {}
+    if name = conf['name']
+      @rs_argument[:name] = conf['name']
+    end
+    if read = conf['read']
+      @rs_argument[:read] = read.to_sym
+    end
+    if refresh_mode = conf['refresh_mode']
+      @rs_argument[:refresh_mode] = refresh_mode.to_sym
+    end
+    if refresh_interval = conf['refresh_interval']
+      @rs_argument[:refresh_interval] = refresh_interval
+    end
+
+    $log.debug "Setup replica set configuration: nodes = #{conf['nodes']}"
+  end
+
+  private
+
+  def operate(collection_name, records)
+    collection = get_or_create_collection(collection_name)
+    rescue_connection_failure do
+      collection.insert(records)
+    end
+  end
+
+  def parse_nodes(nodes)
+    nodes.split(',').map { |node|
+      host, port = node.split(':')
+      [host, Integer(port)]
+    }
+  end
+
+  def get_connection
+    Mongo::ReplSetConnection.new(*@nodes, @rs_argument).db(@database)
+  end
+
+  def rescue_connection_failure
+    retries = 0
+    begin
+      yield
+    rescue Mongo::ConnectionFailure => e
+      retries += 1
+      raise e if retries > @num_retries
+
+      $log.warn "Failed to connect to Replica Set. Try to retry: retry number = #{retries}"
+      sleep 0.5
+      retry
+    end
+  end
+end
+
+
+end
data/lib/fluent/plugin/out_mongo_tag_collection.rb
CHANGED
@@ -11,17 +11,12 @@ class MongoOutputTagCollection < MongoOutput
   def configure(conf)
     super
 
+    @tag_mapped = true
     if remove_prefix_collection = conf['remove_prefix_collection']
-      @
+      @remove_tag_prefix = Regexp.new('^' + Regexp.escape(remove_prefix_collection))
     end
-  end
-
-  def emit(tag, es, chain)
-    super(tag, es, chain, tag)
-  end
 
-
-    operate(chunk.key, collect_records(chunk))
+    $log.warn "'mongo_tag_collection' deprecated. Please use 'mongo' type with 'tag_mapped' parameter"
   end
 end
 
data/test/plugin/out_mongo.rb
CHANGED
@@ -82,6 +82,10 @@ class MongoOutputTest < Test::Unit::TestCase
     assert_equal('test', collection_name)
   end
 
+  def test_write_with_invalid_recoreds
+    skip('Implement this test using BSON directory later')
+  end
+
   def test_write_at_enable_tag
     d = create_driver(CONFIG + %[
       include_tag_key true
data/test/plugin/out_mongo_tag_collection.rb
CHANGED
@@ -1,20 +1,20 @@
-
 require 'test_helper'
 
 class MongoTagCollectionTest < Test::Unit::TestCase
   def setup
     Fluent::Test.setup
-    require 'fluent/plugin/
+    require 'fluent/plugin/out_mongo'
   end
 
   CONFIG = %[
-    type
+    type mongo
     database fluent
-
+    tag_mapped
+    remove_tag_prefix should.remove.
   ]
 
   def create_driver(conf = CONFIG)
-    Fluent::Test::BufferedOutputTestDriver.new(Fluent::
+    Fluent::Test::BufferedOutputTestDriver.new(Fluent::MongoOutput) {
       def start
         super
       end
@@ -31,7 +31,7 @@ class MongoTagCollectionTest < Test::Unit::TestCase
 
   def test_configure
     d = create_driver(CONFIG)
-    assert_equal(/^should\.remove\./, d.instance.instance_variable_get(:@
+    assert_equal(/^should\.remove\./, d.instance.instance_variable_get(:@remove_tag_prefix))
   end
 
   def emit_documents(d)
@@ -54,8 +54,8 @@ class MongoTagCollectionTest < Test::Unit::TestCase
 
   def test_remove_prefix_collection
     d = create_driver(CONFIG)
-    assert_equal('prefix', d.instance.format_collection_name
-    assert_equal('test', d.instance.format_collection_name
-    assert_equal('test.foo', d.instance.format_collection_name
+    assert_equal('prefix', d.instance.__send__(:format_collection_name, 'should.remove.prefix'))
+    assert_equal('test', d.instance.__send__(:format_collection_name, '..test..'))
+    assert_equal('test.foo', d.instance.__send__(:format_collection_name, '..test.foo.'))
   end
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-mongo
 version: !ruby/object:Gem::Version
-  version: 0.5.3
+  version: 0.6.0
 prerelease:
 platform: ruby
 authors:
@@ -9,33 +9,33 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2012-01-15 00:00:00.000000000Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
-  requirement: &
+  requirement: &2155986740 !ruby/object:Gem::Requirement
     none: false
     requirements:
-    - -
+    - - ! '>='
       - !ruby/object:Gem::Version
-        version: 0.10.
+        version: 0.10.7
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *2155986740
 - !ruby/object:Gem::Dependency
   name: mongo
-  requirement: &
+  requirement: &2155985080 !ruby/object:Gem::Requirement
    none: false
    requirements:
-    - -
+    - - =
      - !ruby/object:Gem::Version
-        version: 1.
+        version: 1.5.2
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *2155985080
 - !ruby/object:Gem::Dependency
   name: rake
-  requirement: &
+  requirement: &2155982420 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ! '>='
@@ -43,10 +43,10 @@ dependencies:
         version: 0.9.2
   type: :development
   prerelease: false
-  version_requirements: *
+  version_requirements: *2155982420
 - !ruby/object:Gem::Dependency
   name: simplecov
-  requirement: &
+  requirement: &2155978740 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ! '>='
@@ -54,10 +54,10 @@ dependencies:
         version: 0.5.4
   type: :development
   prerelease: false
-  version_requirements: *
+  version_requirements: *2155978740
 - !ruby/object:Gem::Dependency
   name: rr
-  requirement: &
+  requirement: &2155976020 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ! '>='
@@ -65,7 +65,7 @@ dependencies:
         version: 1.0.0
   type: :development
   prerelease: false
-  version_requirements: *
+  version_requirements: *2155976020
 description: MongoDB plugin for Fluent event collector
 email: repeatedly@gmail.com
 executables:
@@ -84,8 +84,10 @@ files:
 - bin/mongo-tail
 - fluent-plugin-mongo.gemspec
 - lib/fluent/plugin/in_mongo_tail.rb
+- lib/fluent/plugin/mongo_ext.rb
 - lib/fluent/plugin/out_mongo.rb
 - lib/fluent/plugin/out_mongo_backup.rb
+- lib/fluent/plugin/out_mongo_replset.rb
 - lib/fluent/plugin/out_mongo_tag_collection.rb
 - test/plugin/in_mongo_tail.rb
 - test/plugin/out_mongo.rb
@@ -105,7 +107,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
       - 0
-      hash: -
+      hash: -1464110601700401475
 required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
   requirements:
@@ -114,7 +116,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
      - 0
-      hash: -
+      hash: -1464110601700401475
 requirements: []
 rubyforge_project:
 rubygems_version: 1.8.10