log2json-loggers 0.1.5

Files changed (2)
  1. data/lib/log2json/railslogger.rb +96 -0
  2. metadata +128 -0
data/lib/log2json/railslogger.rb ADDED
@@ -0,0 +1,96 @@
+ # Use it like this in your config/environments/{staging,production}.rb:
+ #
+ #   config.logger = ::Log2Json::create_custom_rails_logger('/tmp/alternative.log', config)
+ #
+ # Also, in unicorn.rb, add it like this:
+ #
+ #   logger ::Log2Json::create_custom_unicorn_logger('/tmp/alternative.log', self)
+ #
+
+ require 'logger'
+
+ module Log2Json
+
+   def self.create_custom_logger(to_path)
+     logger = ::Logger.new(to_path)
+     logger.formatter = proc do |severity, datetime, progname, msg|
+       "#{datetime.strftime('%Y-%m-%dT%H:%M:%S%z')}: [#{severity}] #{$$} #{msg.gsub(/\n/, '#012')}\n"
+     end
+     logger
+   end
+
+   # Create a custom logger that's just like the default Rails logger but
+   # additionally logs to another file that has its own formatting for easier
+   # parsing by a log2json log monitoring script.
+   #
+   def self.create_custom_rails_logger(to_path, config)
+     # Do what railties' bootstrap.rb does to initialize a default logger for a Rails app.
+     path = config.paths["log"].first
+     unless File.exist? File.dirname path
+       FileUtils.mkdir_p File.dirname path
+     end
+     f = File.open path, 'a'
+     f.binmode
+     f.sync = true # make sure every write flushes
+
+     logger = ActiveSupport::TaggedLogging.new(
+       ActiveSupport::BufferedLogger.new(f)
+     )
+     logger.level = ActiveSupport::BufferedLogger.const_get(config.log_level.to_s.upcase)
+
+     logger.extend(::Log2Json::Logger.broadcast(
+       ::Log2Json::create_custom_logger(to_path)))
+   end
+
+   # Similar to the custom Rails logger, but for unicorn.
+   #
+   def self.create_custom_unicorn_logger(to_path, config)
+     logger = ::Logger.new(config.set[:stderr_path])
+     logger.extend(::Log2Json::Logger.broadcast(
+       ::Log2Json::create_custom_logger(to_path)))
+   end
+
+
+   # Code stolen from activesupport-4.0.0
+   class Logger < ::Logger
+
+     # Broadcasts logs to multiple loggers.
+     def self.broadcast(logger) # :nodoc:
+       Module.new do
+         define_method(:add) do |*args, &block|
+           logger.add(*args, &block)
+           super(*args, &block)
+         end
+
+         define_method(:<<) do |x|
+           logger << x
+           super(x)
+         end
+
+         define_method(:close) do
+           logger.close
+           super()
+         end
+
+         define_method(:progname=) do |name|
+           logger.progname = name
+           super(name)
+         end
+
+         define_method(:formatter=) do |formatter|
+           logger.formatter = formatter
+           super(formatter)
+         end
+
+         define_method(:level=) do |level|
+           logger.level = level
+           super(level)
+         end
+       end
+     end
+   end
+
+ end
+
+
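
A minimal sketch of how the broadcast mix-in above behaves at runtime, assuming the gem is installed; the paths ('/var/log/app.log', '/tmp/alternative.log') are made up for illustration:

    require 'log2json/railslogger'

    # Any stdlib Logger can act as the primary logger here.
    primary = ::Logger.new('/var/log/app.log')

    # Extend it so every logging call is forwarded to the log2json-formatted
    # logger before the normal implementation runs.
    primary.extend(::Log2Json::Logger.broadcast(
      ::Log2Json::create_custom_logger('/tmp/alternative.log')))

    # Written to /var/log/app.log in the default Logger format, and to
    # /tmp/alternative.log roughly as (timestamp and pid will differ):
    #   2013-09-13T10:20:30-0400: [INFO] 12345 hello#012world
    primary.info("hello\nworld")
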
metadata ADDED
@@ -0,0 +1,128 @@
+ --- !ruby/object:Gem::Specification
+ name: log2json-loggers
+ version: !ruby/object:Gem::Version
+   prerelease: false
+   segments:
+   - 0
+   - 1
+   - 5
+   version: 0.1.5
+ platform: ruby
+ authors:
+ - Jack Kuan
+ autorequire:
+ bindir: bin
+ cert_chain: []
+
+ date: 2013-09-13 00:00:00 -04:00
+ default_executable:
+ dependencies: []
+
+ description: |+
+   Log2json lets you read, filter and send logs as JSON objects via Unix pipes.
+   It is inspired by Logstash, and is meant to be compatible with it at the JSON
+   event/record level so that it can easily work with Kibana.
+
+   Reading logs is done via a shell script (e.g., `tail`) running in its own process.
+   You then configure (see the `syslog2json` or the `nginxlog2json` script for
+   examples) and run your filters in Ruby using the `Log2Json` module and its
+   helper classes.
+
+   `Log2Json` reads logs from stdin (one log record per line), parses the log
+   lines into JSON records, and then serializes and writes the records to stdout,
+   which can then be piped to another process for further processing or for
+   shipping elsewhere.
+
+   Currently, Log2json ships with a `tail-log` script that can be run as the input
+   process. It's the same as using the Linux `tail` utility with the `-v -F`
+   options, except that it also tracks the positions (as the number of lines read
+   from the beginning of each file) in a few files on the file system, so that if
+   the input process is interrupted, it can continue reading from where it left
+   off the next time the same files are followed. This feature is similar to the
+   sincedb feature in Logstash's file input.
+
+   Note: If you don't need the tracking feature (i.e., you are fine with always
+   tailing from the end of the file with `-v -F -n0`), then you can just use the
+   `tail` utility that comes with your Linux distribution (or, more specifically,
+   the `tail` from GNU coreutils). Other versions of the `tail` utility may also
+   work, but are not tested. The input protocol expected by Log2json is very
+   simple and documented in the source code.
+
+   ** The `tail-log` script uses a patched version of `tail` from the GNU coreutils
+      package. A binary of the `tail` utility compiled for Ubuntu 12.04 LTS is
+      included with the Log2json gem. If the binary doesn't work for your
+      distribution, then you'll need to get GNU coreutils-8.13, apply the patch (it
+      can be found in the src/ directory of the installed gem), and then replace
+      the bin/tail binary in the directory of the installed gem with your version
+      of the binary. **
+
+   P.S. If you know of a way to configure and compile ONLY the tail program in
+   coreutils, please let me know! The reason I'm not building tail after gem
+   installation is that it takes too long to configure && make, because that
+   actually builds every utility in coreutils.
+
+
+   For shipping logs to Redis, there's the `lines2redis` script that can be used as
+   the output process in the pipe. For shipping logs from Redis to ElasticSearch,
+   Log2json provides a `redis2es` script.
+
+   Finally, here's an example of Log2json in action:
+
+   From a client machine:
+
+     tail-log /var/log/{sys,mail}log /var/log/{kern,auth}.log | syslog2json |
+       queue=jsonlogs \
+       flush_size=20 \
+       flush_interval=30 \
+       lines2redis host.to.redis.server 6379 0   # use redis DB 0
+
+   On the Redis server:
+
+     redis_queue=jsonlogs redis2es host.to.es.server
+
+
+ email: jack.kuan@thescore.com
+ executables: []
+
+ extensions: []
+
+ extra_rdoc_files: []
+
+ files:
+ - lib/log2json/railslogger.rb
+ has_rdoc: true
+ homepage:
+ licenses: []
+
+ post_install_message:
+ rdoc_options: []
+
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       segments:
+       - 0
+       version: "0"
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       segments:
+       - 0
+       version: "0"
+ requirements: []
+
+ rubyforge_project:
+ rubygems_version: 1.3.6
+ signing_key:
+ specification_version: 3
+ summary: Custom loggers for Rails and Unicorn that log to another file in addition to what their default loggers do.
+ test_files: []
+
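
Tying the two files together: the usage notes at the top of railslogger.rb show where these loggers get wired in. A minimal sketch, where MyApp is a placeholder application name and '/tmp/alternative.log' is the gem's own example path:

    # config/environments/production.rb (or staging.rb)
    MyApp::Application.configure do
      config.logger = ::Log2Json::create_custom_rails_logger('/tmp/alternative.log', config)
    end

    # unicorn.rb
    logger ::Log2Json::create_custom_unicorn_logger('/tmp/alternative.log', self)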