log2json 0.1.9 → 0.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/bin/redis2es CHANGED
@@ -41,11 +41,11 @@ const(:LOG_ENCODING, 'UTF-8')
 const(:LOG_INDEX_NAME, 'log2json-%Y.%m.%d')
 
 # max number of log records allowed in the queue.
-const(:FLUSH_SIZE, 200)
+const(:FLUSH_SIZE, 800)
 
 # flush the queue roughly every FLUSH_TIMEOUT seconds.
 # This value must be >= 2 and it must be a multiple of 2.
-const(:FLUSH_TIMEOUT, 60)
+const(:FLUSH_TIMEOUT, 30)
 if FLUSH_TIMEOUT < 2 or FLUSH_TIMEOUT % 2 != 0
   STDERR.write("Invalid FLUSH_TIMEOUT=#{FLUSH_TIMEOUT}\n")
   exit 1
@@ -75,16 +75,16 @@ HTTP_LOG.level = Logger::WARN
 def flush_queue
   if not @queue.empty?
     req = Net::HTTP::Post.new('/_bulk')
-    req.body = @queue.join("\n") + "\n"
+    req.body = @queue.join("\n")
     response = nil
     begin
       response = @@http.request(req)
     ensure
       if response.nil? or response.code != '200'
         LOG.error(response.body) if not response.nil?
-        #FIXME: might be a good idea to push the undelivered log records to another queue in redis.
         LOG.warn("Failed sending bulk request(#{@queue.size} records) to ES! Logging the request body instead.")
         LOG.info("Failed request body:\n"+req.body)
+        #FIXME: write the queue to another file. This would allow us to resend these records on startup.
       end
     end
     @queue.clear()
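The updated FIXME proposes spilling undelivered records to a file so they can be resent on startup; the package does not implement this yet. Below is only a hedged sketch of that idea, not code from redis2es: the file path and the helper name are made up, and it assumes each queued entry already ends with a newline (see the enqueue change that follows).

# Illustrative only, not part of redis2es: one possible shape for the FIXME above.
FAILED_QUEUE_FILE = '/tmp/redis2es-failed-bulk.log'  # hypothetical path

def spill_failed_queue(entries)
  File.open(FAILED_QUEUE_FILE, 'a') do |f|
    entries.each { |entry| f.write(entry) }  # each entry is already newline-terminated
  end
end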
@@ -107,8 +107,12 @@ def enqueue(logstr)
   log = JSON.load(logstr.force_encoding(LOG_ENCODING))
 
   # add header for each entry according to http://www.elasticsearch.org/guide/reference/api/bulk/
-  @queue << {"index" => {"_index" => es_index(log["@timestamp"]), "_type" => log["@type"]}}.to_json
-  @queue << log.to_json
+  @queue << (
+    {"index" => {"_index" => es_index(log["@timestamp"]), "_type" => log["@type"]}}.to_json <<
+    "\n" <<
+    log.to_json <<
+    "\n"
+  )
 end
 
 def load_redis_script
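Taken together, the two changes above move the bulk-request framing into enqueue: each queued entry is now a complete action-plus-source pair, with both lines newline-terminated as the Elasticsearch bulk API expects. A minimal sketch of what one such entry looks like, using a made-up log record and index name (the real script derives the index name via es_index):

require 'json'

# Hypothetical log record, shaped like the JSON that redis2es pulls off the queue.
log = {
  "@timestamp" => "2013-10-11T00:00:00Z",
  "@type"      => "syslog",
  "@message"   => "hello world",
}

# One queue entry as built by the new enqueue: action metadata, newline, source, newline.
entry = {"index" => {"_index" => "log2json-2013.10.11", "_type" => log["@type"]}}.to_json <<
        "\n" << log.to_json << "\n"

print entry
# {"index":{"_index":"log2json-2013.10.11","_type":"syslog"}}
# {"@timestamp":"2013-10-11T00:00:00Z","@type":"syslog","@message":"hello world"}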
@@ -1,95 +1,59 @@
 # Use it like this in your config/environments/{staging,production}.rb:
 #
-# config.logger = ::Log2Json::create_custom_ralis_logger('/tmp/alternative.log', config)
+# require 'log2json/railslogger'
+# config.logger = ::Log2Json::create_custom_ralis_logger(config)
 #
 # Also, in unicorn.rb, add it like this:
 #
-# logger ::Log2Json::create_custom_unicorn_logger('/tmp/alternative.log', self)
+# if %w(staging production).include?(ENV['RAILS_ENV'])
+#   require 'log2json/railslogger'
+#   logger ::Log2Json::create_custom_unicorn_logger(self)
+# end
 #
 
 require 'logger'
-
+
 module Log2Json
 
-  def self.create_custom_logger(to_path)
-    logger = ::Logger.new(to_path)
-    logger.formatter = proc do |severity, datetime, progname, msg|
+  def self.log_formatter
+    proc do |severity, datetime, progname, msg|
       "#{datetime.strftime('%Y-%m-%dT%H:%M:%S%z')}: [#{severity}] #{$$} #{msg.gsub(/\n/, '#012')}\n"
+      # Note: Following rsyslog's convention, all newlines are converted to '#012'.
     end
-    logger
   end
 
-  # Create a custom logger that's just like the default Rails logger but
-  # additionally logs to another file that has its own formatting for easier
-  # parsing by a log2json log monitoring script.
+  # Create a custom logger that uses its own formatting for easier parsing
+  # by a log2json log monitoring script.
   #
-  def self.create_custom_rails_logger(to_path, config)
-    # Do what railties' bootstrap.rb does to initialize a default logger for a Rails app.
-    path = config.paths["log"].first
-    unless File.exist? File.dirname path
-      FileUtils.mkdir_p File.dirname path
+  def self.create_custom_rails_logger(config)
+    if config.respond_to?(:paths) # assume rails 3
+      path = config.paths["log"].first
+      unless File.exist? File.dirname path
+        FileUtils.mkdir_p File.dirname path
+      end
+      config.colorize_logging = false
+    else # assume rails 2
+      path = config.log_path
+      config.active_record.colorize_logging = false
+    end
+    logger = ::Logger.new(path)
+    logger.formatter = ::Log2Json::log_formatter
+    if defined?(ActiveSupport::TaggedLogging)
+      ActiveSupport::TaggedLogging.new(logger)
+    else
+      logger
     end
-    f = File.open path, 'a'
-    f.binmode
-    f.sync = true # make sure every write flushes
-
-    logger = ActiveSupport::TaggedLogging.new(
-      ActiveSupport::BufferedLogger.new(f)
-    )
-    logger.level = ActiveSupport::BufferedLogger.const_get(config.log_level.to_s.upcase)
-
-    logger.extend(::Log2Json::Logger.broadcast(
-      ::Log2Json::create_custom_logger(to_path)))
   end
 
   # Simiar to the custom rails logger, but for unicorn.
   #
-  def self.create_custom_unicorn_logger(to_path, config)
+  def self.create_custom_unicorn_logger(config)
     logger = ::Logger.new(config.set[:stderr_path])
-    logger.extend(::Log2Json::Logger.broadcast(
-      ::Log2Json::create_custom_logger(to_path)))
+    logger.formatter = ::Log2Json::log_formatter
+    logger
   end
 
 
-  # Code stolen from activesupport-4.0.0
-  class Logger < ::Logger
-
-    # Broadcasts logs to multiple loggers.
-    def self.broadcast(logger) # :nodoc:
-      Module.new do
-        define_method(:add) do |*args, &block|
-          logger.add(*args, &block)
-          super(*args, &block)
-        end
-
-        define_method(:<<) do |x|
-          logger << x
-          super(x)
-        end
-
-        define_method(:close) do
-          logger.close
-          super()
-        end
-
-        define_method(:progname=) do |name|
-          logger.progname = name
-          super(name)
-        end
-
-        define_method(:formatter=) do |formatter|
-          logger.formatter = formatter
-          super(formatter)
-        end
-
-        define_method(:level=) do |level|
-          logger.level = level
-          super(level)
-        end
-      end
-    end
-  end
-
 end
 
 
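For reference, the new log_formatter emits one line per record, with embedded newlines escaped as '#012' (rsyslog's convention, per the comment in the diff). A small sketch, assuming only the Ruby standard library, that mirrors the proc shown above:

require 'logger'

logger = Logger.new(STDOUT)
# Same format string as ::Log2Json::log_formatter in the diff above.
logger.formatter = proc do |severity, datetime, progname, msg|
  "#{datetime.strftime('%Y-%m-%dT%H:%M:%S%z')}: [#{severity}] #{$$} #{msg.gsub(/\n/, '#012')}\n"
end

logger.info("first line\nsecond line")
# => something like: 2013-10-11T09:15:02-0400: [INFO] 4242 first line#012second line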
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
   s.name = 'log2json-loggers'
-  s.version = '0.1.5'
-  s.summary = "Custom loggers for Rails and Unicorn that log to another file in addition to what their default loggers do."
+  s.version = '0.1.9'
+  s.summary = "Custom loggers for Rails and Unicorn that use log2json's single-line log format."
   s.description = IO.read(File.join(File.dirname(__FILE__), 'README'))
   s.authors = ['Jack Kuan']
   s.email = 'jack.kuan@thescore.com'
data/log2json.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'log2json'
-  s.version = '0.1.9'
+  s.version = '0.1.10'
   s.summary = "Read, filter and ship logs. ie, poor man's roll-your-own, light-weight logstash replacement."
   s.description = IO.read(File.join(File.dirname(__FILE__), 'README'))
   s.authors = ['Jack Kuan']
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: log2json
 version: !ruby/object:Gem::Version
-  version: 0.1.9
+  version: 0.1.10
 prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-10-01 00:00:00.000000000 Z
+date: 2013-10-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: jls-grok