logstash-output-mongodb 3.1.4 → 3.1.5
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/CONTRIBUTORS +1 -0
- data/lib/logstash/outputs/mongodb.rb +39 -23
- data/logstash-output-mongodb.gemspec +1 -1
- data/spec/outputs/mongodb_spec.rb +53 -14
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: a80fe32d5f3500dadd616df40cbf18fdb09678d4c2501d7a5514662cf6934b87
|
4
|
+
data.tar.gz: ca57807a9e2a897d6b2d4177735c58a7a2ac8db5f156148393f5e8ae6694584f
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 338736bbad3f0f3f7bea52c42afbf820490308f4c66de9918c30ab87ec65f3a8dc143cf2466de117c5ed747a604fe7ea5d34255914ecb47cd235593aaa7d8fce
|
7
|
+
data.tar.gz: 44f24d65baec8b345730066a17fa0c329d8b6d8109f50c5f50351abb4902d0c68f4d8af54d9f42556c65b0724975bf2426f16e59150f3fa3f0971d78c133206d
|
data/CHANGELOG.md
CHANGED
data/CONTRIBUTORS
CHANGED
@@ -15,6 +15,7 @@ Contributors:
|
|
15
15
|
* Richard Pijnenburg (electrical)
|
16
16
|
* bitsofinfo (bitsofinfo)
|
17
17
|
* Guy Boertje (guyboertje)
|
18
|
+
* Colin Surprenant (colinsurprenant)
|
18
19
|
|
19
20
|
Note: If you've sent us patches, bug reports, or otherwise contributed to
|
20
21
|
Logstash, and you aren't on the list above and want to be, please let us know
|
@@ -46,19 +46,22 @@ class LogStash::Outputs::Mongodb < LogStash::Outputs::Base
|
|
46
46
|
# Mutex used to synchronize access to 'documents'
|
47
47
|
@@mutex = Mutex.new
|
48
48
|
|
49
|
-
public
|
50
49
|
def register
|
50
|
+
if @bulk_size > 1000
|
51
|
+
raise LogStash::ConfigurationError, "Bulk size must be lower than '1000', currently '#{@bulk_size}'"
|
52
|
+
end
|
53
|
+
|
51
54
|
Mongo::Logger.logger = @logger
|
52
55
|
conn = Mongo::Client.new(@uri)
|
53
56
|
@db = conn.use(@database)
|
54
57
|
|
55
|
-
|
56
|
-
raise LogStash::ConfigurationError, "Bulk size must be lower than '1000', currently '#{@bulk_size}'"
|
57
|
-
end
|
58
|
+
@closed = Concurrent::AtomicBoolean.new(false)
|
58
59
|
@documents = {}
|
59
|
-
|
60
|
-
|
61
|
-
|
60
|
+
|
61
|
+
@bulk_thread = Thread.new(@bulk_interval) do |bulk_interval|
|
62
|
+
while @closed.false? do
|
63
|
+
sleep(bulk_interval)
|
64
|
+
|
62
65
|
@@mutex.synchronize do
|
63
66
|
@documents.each do |collection, values|
|
64
67
|
if values.length > 0
|
@@ -69,23 +72,31 @@ class LogStash::Outputs::Mongodb < LogStash::Outputs::Base
|
|
69
72
|
end
|
70
73
|
end
|
71
74
|
end
|
72
|
-
end
|
75
|
+
end
|
73
76
|
|
74
77
|
def receive(event)
|
75
78
|
begin
|
76
79
|
# Our timestamp object now has a to_bson method, using it here
|
77
80
|
# {}.merge(other) so we don't taint the event hash innards
|
78
81
|
document = {}.merge(event.to_hash)
|
82
|
+
|
79
83
|
if !@isodate
|
80
|
-
|
81
|
-
|
84
|
+
timestamp = event.timestamp
|
85
|
+
if timestamp
|
86
|
+
# not using timestamp.to_bson
|
87
|
+
document["@timestamp"] = timestamp.to_json
|
88
|
+
else
|
89
|
+
@logger.warn("Cannot set MongoDB document `@timestamp` field because it does not exist in the event", :event => event)
|
90
|
+
end
|
82
91
|
end
|
92
|
+
|
83
93
|
if @generateId
|
84
|
-
document["_id"] = BSON::ObjectId.new
|
94
|
+
document["_id"] = BSON::ObjectId.new
|
85
95
|
end
|
96
|
+
|
86
97
|
if @bulk
|
98
|
+
collection = event.sprintf(@collection)
|
87
99
|
@@mutex.synchronize do
|
88
|
-
collection = event.sprintf(@collection)
|
89
100
|
if(!@documents[collection])
|
90
101
|
@documents[collection] = []
|
91
102
|
end
|
@@ -99,20 +110,25 @@ class LogStash::Outputs::Mongodb < LogStash::Outputs::Base
|
|
99
110
|
else
|
100
111
|
@db[event.sprintf(@collection)].insert_one(document)
|
101
112
|
end
|
102
|
-
|
103
113
|
rescue => e
|
104
|
-
@logger.warn("Failed to send event to MongoDB", :event => event, :exception => e,
|
105
|
-
:backtrace => e.backtrace)
|
106
114
|
if e.message =~ /^E11000/
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
|
111
|
-
|
115
|
+
# On a duplicate key error, skip the insert.
|
116
|
+
# We could check if the duplicate key err is the _id key
|
117
|
+
# and generate a new primary key.
|
118
|
+
# If the duplicate key error is on another field, we have no way
|
119
|
+
# to fix the issue.
|
120
|
+
@logger.warn("Skipping insert because of a duplicate key error", :event => event, :exception => e)
|
112
121
|
else
|
113
|
-
|
122
|
+
@logger.warn("Failed to send event to MongoDB, retrying in #{@retry_delay.to_s} seconds", :event => event, :exception => e)
|
123
|
+
sleep(@retry_delay)
|
114
124
|
retry
|
115
125
|
end
|
116
126
|
end
|
117
|
-
end
|
118
|
-
|
127
|
+
end
|
128
|
+
|
129
|
+
def close
|
130
|
+
@closed.make_true
|
131
|
+
@bulk_thread.wakeup
|
132
|
+
@bulk_thread.join
|
133
|
+
end
|
134
|
+
end
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-output-mongodb'
|
3
|
-
s.version = '3.1.4'
|
3
|
+
s.version = '3.1.5'
|
4
4
|
s.licenses = ['Apache License (2.0)']
|
5
5
|
s.summary = "Writes events to MongoDB"
|
6
6
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
@@ -8,23 +8,21 @@ describe LogStash::Outputs::Mongodb do
|
|
8
8
|
let(:database) { 'logstash' }
|
9
9
|
let(:collection) { 'logs' }
|
10
10
|
|
11
|
-
let(:config)
|
12
|
-
|
13
|
-
|
11
|
+
let(:config) {{
|
12
|
+
"uri" => uri,
|
13
|
+
"database" => database,
|
14
|
+
"collection" => collection
|
15
|
+
}}
|
14
16
|
|
15
|
-
it "should register" do
|
17
|
+
it "should register and close" do
|
16
18
|
plugin = LogStash::Plugin.lookup("output", "mongodb").new(config)
|
17
19
|
expect {plugin.register}.to_not raise_error
|
20
|
+
plugin.close
|
18
21
|
end
|
19
22
|
|
20
|
-
describe "
|
21
|
-
|
23
|
+
describe "receive" do
|
22
24
|
subject! { LogStash::Outputs::Mongodb.new(config) }
|
23
25
|
|
24
|
-
let(:properties) { { "message" => "This is a message!",
|
25
|
-
"uuid" => SecureRandom.uuid,
|
26
|
-
"number" => BigDecimal.new("4321.1234"),
|
27
|
-
"utf8" => "żółć"} }
|
28
26
|
let(:event) { LogStash::Event.new(properties) }
|
29
27
|
let(:connection) { double("connection") }
|
30
28
|
let(:client) { double("client") }
|
@@ -38,10 +36,51 @@ describe LogStash::Outputs::Mongodb do
|
|
38
36
|
subject.register
|
39
37
|
end
|
40
38
|
|
41
|
-
|
42
|
-
|
43
|
-
|
39
|
+
after(:each) do
|
40
|
+
subject.close
|
41
|
+
end
|
42
|
+
|
43
|
+
describe "#send" do
|
44
|
+
let(:properties) {{
|
45
|
+
"message" => "This is a message!",
|
46
|
+
"uuid" => SecureRandom.uuid,
|
47
|
+
"number" => BigDecimal.new("4321.1234"),
|
48
|
+
"utf8" => "żółć"
|
49
|
+
}}
|
50
|
+
|
51
|
+
it "should send the event to the database" do
|
52
|
+
expect(collection).to receive(:insert_one)
|
53
|
+
subject.receive(event)
|
54
|
+
end
|
44
55
|
end
|
45
|
-
end
|
46
56
|
|
57
|
+
describe "no event @timestamp" do
|
58
|
+
let(:properties) { { "message" => "foo" } }
|
59
|
+
|
60
|
+
it "should not contain a @timestamp field in the mongo document" do
|
61
|
+
expect(event).to receive(:timestamp).and_return(nil)
|
62
|
+
expect(event).to receive(:to_hash).and_return(properties)
|
63
|
+
expect(collection).to receive(:insert_one).with(properties)
|
64
|
+
subject.receive(event)
|
65
|
+
end
|
66
|
+
end
|
67
|
+
|
68
|
+
describe "generateId" do
|
69
|
+
let(:properties) { { "message" => "foo" } }
|
70
|
+
let(:config) {{
|
71
|
+
"uri" => uri,
|
72
|
+
"database" => database,
|
73
|
+
"collection" => collection,
|
74
|
+
"generateId" => true
|
75
|
+
}}
|
76
|
+
|
77
|
+
it "should contain a BSON::ObjectId as _id" do
|
78
|
+
expect(BSON::ObjectId).to receive(:new).and_return("BSON::ObjectId")
|
79
|
+
expect(event).to receive(:timestamp).and_return(nil)
|
80
|
+
expect(event).to receive(:to_hash).and_return(properties)
|
81
|
+
expect(collection).to receive(:insert_one).with(properties.merge("_id" => "BSON::ObjectId"))
|
82
|
+
subject.receive(event)
|
83
|
+
end
|
84
|
+
end
|
85
|
+
end
|
47
86
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-output-mongodb
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 3.1.4
|
4
|
+
version: 3.1.5
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2019-03-25 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|
@@ -119,7 +119,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
119
119
|
version: '0'
|
120
120
|
requirements: []
|
121
121
|
rubyforge_project:
|
122
|
-
rubygems_version: 2.6.
|
122
|
+
rubygems_version: 2.6.13
|
123
123
|
signing_key:
|
124
124
|
specification_version: 4
|
125
125
|
summary: Writes events to MongoDB
|