logstash-input-mongodb 0.1.0 → 0.1.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 069aa95b8c9675b2079f871d66a496da135741f8
-  data.tar.gz: ee102f0da33e5a562bcc4facfa28a63f743a466a
+  metadata.gz: 797d63879b842bd324be17f4afafeb142cbc16d4
+  data.tar.gz: 8becb42077309b41d88a2b1808d00980433af012
 SHA512:
-  metadata.gz: 32c33f58cd391152b7c0ce788007cff3428c3b675a9a2734039d00d88b989ca921623b7b398b60c250ff38eb6c42187b4576552b2ccd40a1649e2e914c9db1a5
-  data.tar.gz: f5e8da5a9250404735a6d5c0d92c2870c14e73c8bab1768a0da522805ef5c2527ce7c011d197d154d4803e1224bd9e99e77d43b722dfbe379e6ea80e295ab9fb
+  metadata.gz: 9b0f0be58b4a1b57cd24d8560e5bb71649bf93775e33bc570c94ac30a39c2fecc2baa5e88f00e80058af69dc89594835ec90747fbe9cc196043337fd89b3043b
+  data.tar.gz: e97852fee59bba3b14739fef7a89f6fcde0e9f29a3eede187d006688c68f178ebe668b6c144cff1e4fa72ff4a82ab213835d7e19446bf557006b51eeda41219d
@@ -79,12 +79,24 @@ class LogStash::Inputs::MongoDB < LogStash::Inputs::Base
     end
   end
 
+  public
+  def init_placeholder(sqlitedb, since_table, mongodb, mongo_collection_name)
+    @logger.debug("init placeholder for #{since_table}_#{mongo_collection_name}")
+    since = sqlitedb[SINCE_TABLE]
+    mongo_collection = mongodb.collection(mongo_collection_name)
+    first_entry = mongo_collection.find({}, :sort => ['_id', Mongo::ASCENDING], :limit => 1).first
+    first_entry_id = first_entry['_id'].to_s
+    since.insert(:table => "#{since_table}_#{mongo_collection_name}", :place => first_entry_id)
+    return first_entry_id
+  end
+
   public
   def get_placeholder(sqlitedb, since_table, mongodb, mongo_collection_name)
     since = sqlitedb[SINCE_TABLE]
     x = since.where(:table => "#{since_table}_#{mongo_collection_name}")
     if x[:place].nil? || x[:place] == 0
       first_entry_id = init_placeholder(sqlitedb, since_table, mongodb, mongo_collection_name)
+      @logger.debug("FIRST ENTRY ID for #{mongo_collection_name} is #{first_entry_id}")
       return first_entry_id
     else
       @logger.debug("placeholder already exists, it is #{x[:place]}")
@@ -92,20 +104,9 @@ class LogStash::Inputs::MongoDB < LogStash::Inputs::Base
     end
   end
 
-  public
-  def init_placeholder(sqlitedb, since_table, mongodb, mongo_collection_name)
-    @logger.debug("init placeholder for #{since_table}_#{mongo_collection_name}")
-    since = sqlitedb[SINCE_TABLE]
-    mongo_collection = mongodb.collection(mongo_collection_name)
-    first_entry = mongo_collection.find_one({})
-    first_entry_id = first_entry['_id'].to_s
-    since.insert(:table => "#{since_table}_#{mongo_collection_name}", :place => first_entry_id)
-    return first_entry_id
-  end
-
   public
   def update_placeholder(sqlitedb, since_table, mongo_collection_name, place)
-    @logger.debug("updating placeholder for #{since_table}_#{mongo_collection_name} to #{place}")
+    #@logger.debug("updating placeholder for #{since_table}_#{mongo_collection_name} to #{place}")
     since = sqlitedb[SINCE_TABLE]
     since.where(:table => "#{since_table}_#{mongo_collection_name}").update(:place => place)
   end
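
For readers unfamiliar with the bookkeeping above: the sqlitedb[SINCE_TABLE] calls follow Sequel's dataset API (where/insert/update) over a small SQLite table that records, per watched collection, the last _id handed to Logstash. The standalone sketch below restates that pattern; it assumes the sequel and sqlite3 gems, and the table name :since, the database path, and the ObjectId strings are all made up for illustration. It is not the plugin's own setup code.

    require 'sequel'

    # Hypothetical path; the real plugin derives this from its configuration.
    sqlitedb = Sequel.sqlite('/tmp/logstash_mongodb_since.db')

    # One row per watched collection: which collection, and the last _id emitted.
    sqlitedb.create_table? :since do
      String :table
      String :place
    end

    since = sqlitedb[:since]

    # init_placeholder-style seed: remember the collection's first _id.
    since.insert(:table => 'since_table_events', :place => '551a1e2b3c4d5e6f70819293')

    # get_placeholder-style read.
    puts since.where(:table => 'since_table_events').first[:place]

    # update_placeholder-style write after a batch has been queued.
    since.where(:table => 'since_table_events').update(:place => '551a1e2c3c4d5e6f70819294')
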
@@ -130,7 +131,9 @@ class LogStash::Inputs::MongoDB < LogStash::Inputs::Base
   public
   def get_cursor_for_collection(mongodb, mongo_collection_name, last_id_object, batch_size)
     collection = mongodb.collection(mongo_collection_name)
-    return collection.find({:_id => {:$gt => last_id_object}}).limit(batch_size)
+    # Need to make this sort by date in object id then get the first of the series
+    # db.events_20150320.find().limit(1).sort({ts:1})
+    return collection.find({:_id => {:$gte => last_id_object}}).limit(batch_size)
   end
 
   public
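
For context on the $gt to $gte switch: a BSON ObjectId begins with a big-endian creation timestamp, so range-filtering on _id against the stored placeholder walks a collection in rough insertion order. The sketch below restates the two queries in isolation; it assumes collection is a collection object from the legacy mongo 1.x Ruby driver (the same find(selector, opts) style used above), and the helper names are illustrative, not the plugin's. Note that $gte, unlike $gt, returns the boundary document itself again on the next poll.

    # Illustrative helpers only, not part of the plugin.
    def next_batch(collection, last_id, batch_size)
      last_id_object = BSON::ObjectId(last_id)   # 24-char hex string -> ObjectId
      # ObjectIds sort by creation time, so this resumes roughly where we left off.
      collection.find({:_id => {:$gte => last_id_object}}).limit(batch_size)
    end

    # First-document lookup in the style of the new init_placeholder above.
    def oldest_id(collection)
      first = collection.find({}, :sort => ['_id', Mongo::ASCENDING], :limit => 1).first
      first && first['_id'].to_s
    end
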
@@ -222,24 +225,23 @@ class LogStash::Inputs::MongoDB < LogStash::Inputs::Base
       collection_name = collection[:name]
       @logger.debug("collection_data is: #{@collection_data}")
       last_id = @collection_data[index][:last_id]
-      @logger.debug("last_id is #{last_id}", :index => index, :collection => collection_name)
+      #@logger.debug("last_id is #{last_id}", :index => index, :collection => collection_name)
       # get batch of events starting at the last_place if it is set
       last_id_object = BSON::ObjectId(last_id)
       cursor = get_cursor_for_collection(@mongodb, collection_name, last_id_object, batch_size)
       cursor.each do |doc|
-        @logger.debug("Date from mongo: #{doc['_id'].generation_time.to_s}")
         logdate = DateTime.parse(doc['_id'].generation_time.to_s)
-        @logger.debug("logdate.iso8601: #{logdate.iso8601}")
         event = LogStash::Event.new("host" => @host)
         decorate(event)
         event["logdate"] = logdate.iso8601
-        @logger.debug("type of doc is: "+doc.class.to_s)
         log_entry = doc.to_h.to_s
         log_entry['_id'] = log_entry['_id'].to_s
         event["log_entry"] = log_entry
-        @logger.debug("EVENT looks like: "+event.to_s)
-        @logger.debug("Sent message: "+doc.to_h.to_s)
-        @logger.debug("EVENT looks like: "+event.to_s)
+        event["mongo_id"] = doc['_id'].to_s
+        @logger.debug("mongo_id: "+doc['_id'].to_s)
+        #@logger.debug("EVENT looks like: "+event.to_s)
+        #@logger.debug("Sent message: "+doc.to_h.to_s)
+        #@logger.debug("EVENT looks like: "+event.to_s)
         # Extract the HOST_ID and PID from the MongoDB BSON::ObjectID
         if @unpack_mongo_id
           doc_obj_bin = doc['_id'].to_a.pack("C*").unpack("a4 a3 a2 a3")
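
The unpack("a4 a3 a2 a3") call at the end of this hunk splits the 12 ObjectId bytes into the classic timestamp(4) / machine(3) / pid(2) / counter(3) layout. A standalone illustration with a made-up hex ObjectId and no driver involved:

    hex_id = "551a1e2b3c4d5e6f70819293"   # hypothetical ObjectId, as its 24-char hex form
    bytes  = [hex_id].pack("H*")          # hex string -> 12 raw bytes

    ts, machine, pid, counter = bytes.unpack("a4 a3 a2 a3")

    puts Time.at(ts.unpack("N").first)                               # creation time (big-endian seconds)
    puts machine.unpack("H*").first                                  # machine identifier, as hex
    puts pid.unpack("n").first                                       # process id
    puts counter.unpack("C3").inject(0) { |acc, b| (acc << 8) + b }  # per-process counter
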
@@ -253,6 +255,14 @@ class LogStash::Inputs::MongoDB < LogStash::Inputs::Base
       # Flatten the JSON so that the data is usable in Kibana
       flat_doc = flatten(doc)
       # Check for different types of expected values and add them to the event
+      if flat_doc['info_message'] && (flat_doc['info_message'] =~ /collection stats: .+/)
+        # Some custom stuff I'm having to do to fix formatting in past logs...
+        sub_value = flat_doc['info_message'].sub("collection stats: ", "")
+        JSON.parse(sub_value).each do |k1,v1|
+          flat_doc["collection_stats_#{k1.to_s}"] = v1
+        end
+      end
+
       flat_doc.each do |k,v|
         # Check for an integer
         if /\A[-+]?\d+[.][\d]+\z/ === v
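
The new info_message branch above turns a stringified stats payload back into structured fields before the generic type handling runs. A small standalone sketch of the same transformation, using a made-up log line:

    require 'json'

    # Hypothetical flattened field in the shape the new branch targets.
    info_message = 'collection stats: {"count": 42, "size": 1024, "avgObjSize": 24}'

    if info_message =~ /collection stats: .+/
      stats = JSON.parse(info_message.sub("collection stats: ", ""))
      flat  = stats.each_with_object({}) { |(k, v), h| h["collection_stats_#{k}"] = v }
      # flat => {"collection_stats_count"=>42, "collection_stats_size"=>1024,
      #          "collection_stats_avgObjSize"=>24}
    end
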
@@ -303,11 +313,12 @@ class LogStash::Inputs::MongoDB < LogStash::Inputs::Base
 
         queue << event
         @collection_data[index][:last_id] = doc['_id'].to_s
-        @collection_data = update_watched_collections(@mongodb, @collection, @sqlitedb)
       end
       # Store the last-seen doc in the database
       update_placeholder(@sqlitedb, since_table, collection_name, @collection_data[index][:last_id])
     end
+    @logger.debug("Updating watch collections")
+    @collection_data = update_watched_collections(@mongodb, @collection, @sqlitedb)
 
     # nothing found in that iteration
     # sleep a bit
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-mongodb'
-  s.version = '0.1.0'
+  s.version = '0.1.1'
   s.licenses = ['Apache License (2.0)']
   s.summary = "This takes entries from mongodb as an input to logstash."
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-mongodb
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.1.1
 platform: ruby
 authors:
 - Philip Hutchins
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-03-18 00:00:00.000000000 Z
+date: 2015-03-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash