log2json-loggers 0.1.5 → 0.1.6

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (2)
  1. data/lib/log2json/railslogger.rb +9 -53
  2. metadata +63 -106
data/lib/log2json/railslogger.rb CHANGED
@@ -1,29 +1,26 @@
  # Use it like this in your config/environments/{staging,production}.rb:
  #
- # config.logger = ::Log2Json::create_custom_ralis_logger('/tmp/alternative.log', config)
+ # config.logger = ::Log2Json::create_custom_ralis_logger(config)
  #
  # Also, in unicorn.rb, add it like this:
  #
- # logger ::Log2Json::create_custom_unicorn_logger('/tmp/alternative.log', self)
+ # logger ::Log2Json::create_custom_unicorn_logger(self)
  #

  require 'logger'

  module Log2Json

-   def self.create_custom_logger(to_path)
-     logger = ::Logger.new(to_path)
-     logger.formatter = proc do |severity, datetime, progname, msg|
+   def self.log_formatter
+     proc do |severity, datetime, progname, msg|
        "#{datetime.strftime('%Y-%m-%dT%H:%M:%S%z')}: [#{severity}] #{$$} #{msg.gsub(/\n/, '#012')}\n"
      end
-     logger
    end

    # Create a custom logger that's just like the default Rails logger but
-   # additionally logs to another file that has its own formatting for easier
-   # parsing by a log2json log monitoring script.
+   # has its own formatting for easier parsing by a log2json log monitoring script.
    #
-   def self.create_custom_rails_logger(to_path, config)
+   def self.create_custom_rails_logger(config)
      # Do what railties' bootstrap.rb does to initialize a default logger for a Rails app.
      path = config.paths["log"].first
      unless File.exist? File.dirname path
@@ -38,58 +35,17 @@ module Log2Json
      )
      logger.level = ActiveSupport::BufferedLogger.const_get(config.log_level.to_s.upcase)

-     logger.extend(::Log2Json::Logger.broadcast(
-         ::Log2Json::create_custom_logger(to_path)))
+     logger.formatter = ::Log2Json::log_formatter
    end

    # Simiar to the custom rails logger, but for unicorn.
    #
-   def self.create_custom_unicorn_logger(to_path, config)
+   def self.create_custom_unicorn_logger(config)
      logger = ::Logger.new(config.set[:stderr_path])
-     logger.extend(::Log2Json::Logger.broadcast(
-         ::Log2Json::create_custom_logger(to_path)))
+     logger.formatter = ::Log2Json::log_formatter
    end


-   # Code stolen from activesupport-4.0.0
-   class Logger < ::Logger
-
-     # Broadcasts logs to multiple loggers.
-     def self.broadcast(logger) # :nodoc:
-       Module.new do
-         define_method(:add) do |*args, &block|
-           logger.add(*args, &block)
-           super(*args, &block)
-         end
-
-         define_method(:<<) do |x|
-           logger << x
-           super(x)
-         end
-
-         define_method(:close) do
-           logger.close
-           super()
-         end
-
-         define_method(:progname=) do |name|
-           logger.progname = name
-           super(name)
-         end
-
-         define_method(:formatter=) do |formatter|
-           logger.formatter = formatter
-           super(formatter)
-         end
-
-         define_method(:level=) do |level|
-           logger.level = level
-           super(level)
-         end
-       end
-     end
-   end
-
  end

metadata CHANGED
@@ -1,128 +1,85 @@
- --- !ruby/object:Gem::Specification
+ --- !ruby/object:Gem::Specification
  name: log2json-loggers
- version: !ruby/object:Gem::Version
-   prerelease: false
-   segments:
-   - 0
-   - 1
-   - 5
-   version: 0.1.5
+ version: !ruby/object:Gem::Version
+   version: 0.1.6
+   prerelease:
  platform: ruby
- authors:
+ authors:
  - Jack Kuan
  autorequire:
  bindir: bin
  cert_chain: []
-
- date: 2013-09-13 00:00:00 -04:00
- default_executable:
+ date: 2013-09-30 00:00:00.000000000 Z
  dependencies: []
-
- description: |+
-   Log2json lets you read, filter and send logs as JSON objects via Unix pipes.
-   It is inspired by Logstash, and is meant to be compatible with it at the JSON
-   event/record level so that it can easily work with Kibana.
-
-   Reading logs is done via a shell script(eg, `tail`) running in its own process.
-   You then configure(see the `syslog2json` or the `nginxlog2json` script for
-   examples) and run your filters in Ruby using the `Log2Json` module and its
-   contained helper classes.
-
-   `Log2Json` reads from stdin the logs(one log record per line), parses the log
-   lines into JSON records, and then serializes and writes the records to stdout,
-   which then can be piped to another process for processing or sending it to
-   somewhere else.
-
-   Currently, Log2json ships with a `tail-log` script that can be run as the input
-   process. It's the same as using the Linux `tail` utility with the `-v -F`
-   options except that it also tracks the positions(as the numbers of lines read
-   from the beginning of the files) in a few files in the file system so that if the
-   input process is interrupted, it can continue reading from where it left off
-   next time if the files had been followed. This feature is similar to the sincedb
-   feature in Logstash's file input.
-
-   Note: If you don't need the tracking feature(ie, you are fine with always
-   tailling from the end of file with `-v -F -n0`), then you can just use the `tail`
-   utility that comes with your Linux distribution.(Or more specifically, the
-   `tail` from the GNU coreutils). Other versions of the `tail` utility may also
-   work, but are not tested. The input protocol expected by Log2json is very
-   simple and documented in the source code.
-
-   ** The `tail-log` script uses a patched version of `tail` from the GNU coreutils
-      package. A binary of the `tail` utility compiled for Ubuntu 12.04 LTS is
-      included with the Log2json gem. If the binary doesn't work for your
-      distribution, then you'll need to get GNU coreutils-8.13, apply the patch(it
-      can be found in the src/ directory of the installed gem), and then replace
-      the bin/tail binary in the directory of the installed gem with your version
-      of the binary. **
-
-   P.S. If you know of a way to configure and compile ONLY the tail program in
-      coreutils, please let me know! The reason I'm not building tail post gem
-      installation is that it takes too long to configure && make because that
-      actually builds every utilties in coreutils.
-
-
-   For shipping logs to Redis, there's the `lines2redis` script that can be used as
-   the output process in the pipe. For shipping logs from Redis to ElasticSearch,
-   Log2json provides a `redis2es` script.
-
-   Finally here's an example of Log2json in action:
-
-   From a client machine:
-
-       tail-log /var/log/{sys,mail}log /var/log/{kern,auth}.log | syslog2json |
-         queue=jsonlogs \
-             flush_size=20 \
-             flush_interval=30 \
-             lines2redis host.to.redis.server 6379 0 # use redis DB 0
-
-
-   On the Redis server:
-
-       redis_queue=jsonlogs redis2es host.to.es.server
-
-
-
-
-
+ description: ! "Log2json lets you read, filter and send logs as JSON objects via Unix
+   pipes.\nIt is inspired by Logstash, and is meant to be compatible with it at the
+   JSON\nevent/record level so that it can easily work with Kibana. \n\nReading logs
+   is done via a shell script(eg, `tail`) running in its own process.\nYou then configure(see
+   the `syslog2json` or the `nginxlog2json` script for\nexamples) and run your filters
+   in Ruby using the `Log2Json` module and its\ncontained helper classes.\n\n`Log2Json`
+   reads from stdin the logs(one log record per line), parses the log\nlines into JSON
+   records, and then serializes and writes the records to stdout,\nwhich then can be
+   piped to another process for processing or sending it to\nsomewhere else.\n\nCurrently,
+   Log2json ships with a `tail-log` script that can be run as the input\nprocess. It's
+   the same as using the Linux `tail` utility with the `-v -F`\noptions except that
+   it also tracks the positions(as the numbers of lines read\nfrom the beginning of
+   the files) in a few files in the file system so that if the\ninput process is interrupted,
+   it can continue reading from where it left off\nnext time if the files had been
+   followed. This feature is similar to the sincedb\nfeature in Logstash's file input.\n\nNote:
+   If you don't need the tracking feature(ie, you are fine with always\ntailling from
+   the end of file with `-v -F -n0`), then you can just use the `tail`\nutility that
+   comes with your Linux distribution.(Or more specifically, the\n`tail` from the GNU
+   coreutils). Other versions of the `tail` utility may also\nwork, but are not tested.
+   The input protocol expected by Log2json is very\nsimple and documented in the source
+   code.\n\n** The `tail-log` script uses a patched version of `tail` from the GNU
+   coreutils\n   package. A binary of the `tail` utility compiled for Ubuntu 12.04
+   LTS is\n   included with the Log2json gem. If the binary doesn't work for your\n
+   \ distribution, then you'll need to get GNU coreutils-8.13, apply the patch(it\n
+   \ can be found in the src/ directory of the installed gem), and then replace\n
+   \ the bin/tail binary in the directory of the installed gem with your version\n
+   \ of the binary. ** \n\nP.S. If you know of a way to configure and compile ONLY
+   the tail program in\n   coreutils, please let me know! The reason I'm not building
+   tail post gem\n   installation is that it takes too long to configure && make
+   because that\n   actually builds every utilties in coreutils.\n\n\nFor shipping
+   logs to Redis, there's the `lines2redis` script that can be used as\nthe output
+   process in the pipe. For shipping logs from Redis to ElasticSearch,\nLog2json provides
+   a `redis2es` script.\n\nFinally here's an example of Log2json in action:\n\nFrom
+   a client machine:\n\n    tail-log /var/log/{sys,mail}log /var/log/{kern,auth}.log
+   | syslog2json |\n      queue=jsonlogs \\\n          flush_size=20 \\\n          flush_interval=30
+   \\\n          lines2redis host.to.redis.server 6379 0 # use redis DB 0\n\n\nOn the
+   Redis server:\n\n    redis_queue=jsonlogs redis2es host.to.es.server\n\n\n\nResources
+   that help writing log2json filters:\n - look at log2json.rb source and example
+   filters\n - http://grokdebug.herokuapp.com/\n - http://www.ruby-doc.org/stdlib-1.9.3/libdoc/date/rdoc/DateTime.html#method-i-strftime\n\n"
  email: jack.kuan@thescore.com
  executables: []
-
  extensions: []
-
  extra_rdoc_files: []
-
- files:
+ files:
  - lib/log2json/railslogger.rb
- has_rdoc: true
  homepage:
  licenses: []
-
  post_install_message:
  rdoc_options: []
-
- require_paths:
+ require_paths:
  - lib
- required_ruby_version: !ruby/object:Gem::Requirement
-   requirements:
-   - - ">="
-     - !ruby/object:Gem::Version
-       segments:
-       - 0
-       version: "0"
- required_rubygems_version: !ruby/object:Gem::Requirement
-   requirements:
-   - - ">="
-     - !ruby/object:Gem::Version
-       segments:
-       - 0
-       version: "0"
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
  requirements: []
-
  rubyforge_project:
- rubygems_version: 1.3.6
+ rubygems_version: 1.8.23
  signing_key:
  specification_version: 3
- summary: Custom loggers for Rails and Unicorn that log to another file in addition to what their default loggers do.
+ summary: Custom loggers for Rails and Unicorn that use log2json's single-line log
+   format.
  test_files: []
-
+ has_rdoc:
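For context on the new summary ("single-line log format"), here is a minimal sketch of what the 0.1.6 formatter produces when attached to a plain ::Logger, assuming the gem is installed; the timestamp and PID in the sample output are illustrative:

    require 'logger'
    require 'log2json/railslogger'

    log = ::Logger.new($stdout)
    log.formatter = ::Log2Json::log_formatter
    log.info("Completed 200 OK\n  Rendered posts/index")
    # Embedded newlines are rewritten to '#012' so each record stays on one line, e.g.:
    # 2013-09-30T12:34:56+0000: [INFO] 12345 Completed 200 OK#012  Rendered posts/index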