log4rails 1.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE.bsd +12 -0
- data/README.md +37 -0
- data/lib/log4r/GDC.rb +41 -0
- data/lib/log4r/MDC.rb +59 -0
- data/lib/log4r/NDC.rb +86 -0
- data/lib/log4r/base.rb +74 -0
- data/lib/log4r/config.rb +9 -0
- data/lib/log4r/configurator.rb +224 -0
- data/lib/log4r/formatter/formatter.rb +105 -0
- data/lib/log4r/formatter/log4jxmlformatter.rb +65 -0
- data/lib/log4r/formatter/patternformatter.rb +145 -0
- data/lib/log4r/lib/drbloader.rb +52 -0
- data/lib/log4r/lib/xmlloader.rb +24 -0
- data/lib/log4r/log4r-rails.yaml +60 -0
- data/lib/log4r/logevent.rb +28 -0
- data/lib/log4r/logger.rb +206 -0
- data/lib/log4r/loggerfactory.rb +89 -0
- data/lib/log4r/logserver.rb +28 -0
- data/lib/log4r/outputter/bufferedsyslogoutputter.rb +47 -0
- data/lib/log4r/outputter/consoleoutputters.rb +18 -0
- data/lib/log4r/outputter/datefileoutputter.rb +117 -0
- data/lib/log4r/outputter/emailoutputter.rb +143 -0
- data/lib/log4r/outputter/fileoutputter.rb +57 -0
- data/lib/log4r/outputter/iooutputter.rb +55 -0
- data/lib/log4r/outputter/outputter.rb +134 -0
- data/lib/log4r/outputter/outputterfactory.rb +60 -0
- data/lib/log4r/outputter/remoteoutputter.rb +40 -0
- data/lib/log4r/outputter/rollingfileoutputter.rb +234 -0
- data/lib/log4r/outputter/scribeoutputter.rb +37 -0
- data/lib/log4r/outputter/staticoutputter.rb +30 -0
- data/lib/log4r/outputter/syslogoutputter.rb +126 -0
- data/lib/log4r/outputter/udpoutputter.rb +53 -0
- data/lib/log4r/railtie.rb +211 -0
- data/lib/log4r/rdoc/GDC +14 -0
- data/lib/log4r/rdoc/MDC +16 -0
- data/lib/log4r/rdoc/NDC +41 -0
- data/lib/log4r/rdoc/configurator +243 -0
- data/lib/log4r/rdoc/emailoutputter +103 -0
- data/lib/log4r/rdoc/formatter +39 -0
- data/lib/log4r/rdoc/log4jxmlformatter +21 -0
- data/lib/log4r/rdoc/log4r +89 -0
- data/lib/log4r/rdoc/logger +175 -0
- data/lib/log4r/rdoc/logserver +85 -0
- data/lib/log4r/rdoc/outputter +108 -0
- data/lib/log4r/rdoc/patternformatter +128 -0
- data/lib/log4r/rdoc/scribeoutputter +16 -0
- data/lib/log4r/rdoc/syslogoutputter +29 -0
- data/lib/log4r/rdoc/win32eventoutputter +7 -0
- data/lib/log4r/rdoc/yamlconfigurator +20 -0
- data/lib/log4r/repository.rb +88 -0
- data/lib/log4r/staticlogger.rb +49 -0
- data/lib/log4r/version.rb +4 -0
- data/lib/log4r/yamlconfigurator.rb +198 -0
- data/lib/log4rails.rb +22 -0
- metadata +97 -0
# :nodoc:
require 'log4r/lib/drbloader'
require 'log4r/outputter/outputter'

module Log4r
  # Outputter that forwards LogEvents to a remote LogServer over ROMP.
  # Events are buffered locally and shipped once the buffer reaches
  # :buffsize entries. See log4r/logserver.rb
  class RemoteOutputter < Outputter

    # [<tt>:uri</tt>]      URI of the remote LogServer.
    # [<tt>:buffsize</tt>] number of events to buffer before sending (default 1).
    def initialize(_name, hash = {})
      super(_name, hash)
      @uri = hash[:uri] || hash['uri']
      @buffsize = (hash[:buffsize] || hash['buffsize'] || 1).to_i
      @buff = []
      connect
    end

    if HAVE_ROMP
      include ROMPClient
    else
      # Without ROMP there is no transport: any attempt to construct this
      # outputter fails immediately.
      def initialize(*args)
        raise RuntimeError, "LogServer not supported. ROMP is required", caller
      end
    end

    # Call flush to send any remaining LogEvents to the remote server.
    def flush
      synch { send_buffer }
    end

    private

    # Buffer the event; ship the whole buffer once it reaches @buffsize.
    def canonical_log(logevent)
      synch do
        @buff.push logevent
        send_buffer if @buff.size >= @buffsize
      end
    end
  end
end
# :nodoc:
# Version:: $Id: rollingfileoutputter.rb,v 1.2 2009/09/29 18:13:13 colbygk Exp $

require "log4r/outputter/fileoutputter"
require "log4r/staticlogger"

require 'fileutils'

module Log4r

  # RollingFileOutputter - subclass of FileOutputter that rolls files on size
  # or time. So, given a filename of "error.log", the first log file will be "error000001.log".
  # When its check condition is exceeded, it'll create and log to "error000002.log", etc.
  #
  # Additional hash arguments are:
  #
  # [<tt>:maxsize</tt>] Maximum size of the file in bytes.
  # [<tt>:maxtime</tt>] Maximum age of the file in seconds.
  # [<tt>:max_backups</tt>] Maximum number of prior log files to maintain. If max_backups is a positive number,
  #   then each time a roll happens, RollingFileOutputter will delete the oldest backup log files in excess
  #   of this number (if any). So, if max_backups is 10, then a maximum of 11 files will be maintained (the current
  #   log, plus 10 backups). If max_backups is 0, no backups will be kept. If it is negative (the default),
  #   there will be no limit on the number of files created. Note that the sequence numbers will continue to escalate;
  #   old sequence numbers are not reused.
  # [<tt>:trunc</tt>] If true, deletes ALL existing log files (based on :filename) upon initialization,
  #   and the sequence numbering will start over at 000001. Otherwise continues logging where it left off
  #   last time (i.e. either to the file with the highest sequence number, or a new file, as appropriate).
  class RollingFileOutputter < FileOutputter

    attr_reader :current_sequence_number, :maxsize, :maxtime, :start_time, :max_backups

    def initialize(_name, hash={})
      super(_name, hash.merge({:create => false}))
      if hash.has_key?(:maxsize) || hash.has_key?('maxsize')
        @maxsize = positive_integer_option(hash, 'maxsize')
      end
      if hash.has_key?(:maxtime) || hash.has_key?('maxtime')
        @maxtime = positive_integer_option(hash, 'maxtime')
      end
      if hash.has_key?(:max_backups) || hash.has_key?('max_backups')
        # NOTE: Fixnum was removed in modern Ruby; Integer is the correct class.
        _max_backups = (hash[:max_backups] or hash['max_backups']).to_i
        unless _max_backups.is_a?(Integer)
          raise TypeError, "Argument 'max_backups' must be an Integer", caller
        end
        @max_backups = _max_backups
      else
        @max_backups = -1
      end
      # @filename starts out as the file (including path) provided by the user, e.g. "\usr\logs\error.log".
      # It will get assigned the current log file (including sequence number)
      # @log_dir is the directory in which we'll log, e.g. "\usr\logs"
      # @file_extension is the file's extension (if any) including any period, e.g. ".log"
      # @core_file_name is the part of the log file's name, sans sequence digits or extension, e.g. "error"
      @log_dir = File.dirname(@filename)
      @file_extension = File.extname(@filename) # Note: the File API doc comment states that this doesn't include the period, but its examples and behavior do include it. We'll depend on the latter.
      @core_file_name = File.basename(@filename, @file_extension)
      purge_log_files(0) if @trunc
      @current_sequence_number = get_current_sequence_number()
      makeNewFilename
      # Now @filename points to a properly sequenced filename, which may or may not yet exist.
      open_log_file('a')

      # Note: it's possible we're already in excess of our time or size constraint for the current file;
      # no worries -- if a new file needs to be started, it'll happen during the write() call.
    end

    #######
    private
    #######

    # Fetch an option by symbol or string key, coerce to Integer, and verify
    # it is strictly positive (the original accepted negative values despite
    # its "must be > 0" message). Raises TypeError on bad input.
    def positive_integer_option(hash, name)
      value = (hash[name.to_sym] or hash[name]).to_i
      unless value.is_a?(Integer)
        raise TypeError, "Argument '#{name}' must be an Integer", caller
      end
      if value <= 0
        raise TypeError, "Argument '#{name}' must be > 0", caller
      end
      value
    end

    # Delete all but the latest number_to_keep log files.
    def purge_log_files(number_to_keep)
      Dir.chdir(@log_dir) do
        # Make a list of the log files to delete. Start with all of the matching log files...
        glob = "#{@core_file_name}[0-9][0-9][0-9][0-9][0-9][0-9]#{@file_extension}"
        files = Dir.glob(glob)

        # ... if there are fewer than our threshold, just return...
        if (files.size() <= number_to_keep)
          # Logger.log_internal {"No log files need purging."}
          return
        end
        # ...then remove those that we want to keep (i.e. the most recent #{number_to_keep} files).
        files.sort!().slice!(-number_to_keep, number_to_keep)

        # Delete the files. We use force (rm_f), so in case any files can't be deleted (e.g. someone's got one
        # open in an editor), we'll swallow the error and keep going.
        FileUtils.rm_f(files)
        Logger.log_internal { "Purged #{files.length} log files: #{files}" }
      end
    end

    # Get the highest existing log file sequence number, or 1 if there are no existing log files.
    def get_current_sequence_number()
      max_seq_no = 0
      Dir.foreach(@log_dir) do |child|
        if child =~ /^#{@core_file_name}(\d+)#{@file_extension}$/
          seq_no = $1.to_i
          if (seq_no > max_seq_no)
            max_seq_no = seq_no
          end
        end
      end
      return [max_seq_no, 1].max
    end

    # perform the write
    def write(data)
      # we have to keep track of the file size ourselves - File.size doesn't
      # seem to report the correct size when the size changes rapidly
      @datasize += data.size + 1 # the 1 is for newline
      roll if requiresRoll
      super
    end

    # Constructs a new filename from the @current_sequence_number, @core_file_name, and @file_extension,
    # and assigns it to @filename
    def makeNewFilename
      # note use of hard coded 6 digit sequence width - is this enough files?
      # format() pads safely and keeps working past 999999; the old
      # '"0" * (6 - length)' arithmetic raised ArgumentError there.
      padded_seq_no = format("%06d", @current_sequence_number)
      newbase = "#{@core_file_name}#{padded_seq_no}#{@file_extension}"
      @filename = File.join(@log_dir, newbase)
    end

    # Open @filename with the given mode:
    # 'a' - appends to the end of the file if it exists; otherwise creates it.
    # 'w' - truncates the file to zero length if it exists, otherwise creates it.
    # Re-initializes @datasize and @start_time appropriately.
    def open_log_file(mode)
      # It appears that if a file has been recently deleted then recreated, calls like
      # File.ctime can return the erstwhile creation time. File.size? can similarly return
      # old information. So instead of simply doing ctime and size checks after File.new, we
      # do slightly more complicated checks beforehand:
      if (mode == 'w' || !File.exist?(@filename)) # File.exists? was removed in Ruby 3.2
        @start_time = Time.now()
        @datasize = 0
      else
        @start_time = File.ctime(@filename)
        @datasize = File.size?(@filename) || 0 # File.size? returns nil even if the file exists but is empty; we convert it to 0.
      end
      @out = File.new(@filename, mode)
      Logger.log_internal {"File #{@filename} opened with mode #{mode}"}
    end

    # does the file require a roll?
    def requiresRoll
      if !@maxsize.nil? && @datasize > @maxsize
        Logger.log_internal { "Rolling because #{@filename} (#{@datasize} bytes) has exceeded the maxsize limit (#{@maxsize} bytes)." }
        return true
      end
      if !@maxtime.nil? && (Time.now - @start_time) > @maxtime
        Logger.log_internal { "Rolling because #{@filename} (created: #{@start_time}) has exceeded the maxtime age (#{@maxtime} seconds)." }
        return true
      end
      false
    end

    # roll the file
    def roll
      begin
        # If @baseFilename == @filename, then this method is about to
        # try to close out a file that is not actually opened because
        # fileoutputter has been called with the parameter roll=true
        # TODO: Is this check valid any more? I suspect not. Am commenting out...:
        #if ( @baseFilename != @filename ) then
          @out.close
        #end
      rescue
        Logger.log_internal {
          "RollingFileOutputter '#{@name}' could not close #{@filename}"
        }
      end

      # Prepare the next file. (Note: if max_backups is zero, we can skip this; we'll
      # just overwrite the existing log file)
      if (@max_backups != 0)
        @current_sequence_number += 1
        makeNewFilename
      end

      open_log_file('w')

      # purge any excess log files (unless max_backups is negative, which means don't purge).
      if (@max_backups >= 0)
        purge_log_files(@max_backups + 1)
      end

    end

  end

end

# this can be found in examples/fileroll.rb as well
if __FILE__ == $0
  require 'log4r'
  include Log4r


  timeLog = Logger.new 'WbExplorer'
  timeLog.outputters = RollingFileOutputter.new("WbExplorer", { "filename" => "TestTime.log", "maxtime" => 10, "trunc" => true })
  timeLog.level = DEBUG

  100.times { |t|
    timeLog.info "blah #{t}"
    sleep(1.0)
  }

  sizeLog = Logger.new 'WbExplorer'
  sizeLog.outputters = RollingFileOutputter.new("WbExplorer", { "filename" => "TestSize.log", "maxsize" => 16000, "trunc" => true })
  sizeLog.level = DEBUG

  10000.times { |t|
    sizeLog.info "blah #{t}"
  }

end
# :nodoc:
# Version:: $Id$

require "log4r/outputter/outputter"
require "rubygems"
require "scribe"

module Log4r
  # Outputter that ships formatted log lines to a scribe server.
  class ScribeOutputter < Outputter
    attr_reader :host, :port, :category

    # Hash options (symbol or string keys):
    # [<tt>:host</tt>] scribe server host, defaults to 'localhost'
    # [<tt>:port</tt>] scribe server port, defaults to '1463'
    # [<tt>:category</tt>] scribe category, defaults to 'default'
    def initialize(_name, hash={})
      super(_name, hash)
      # Fix: the original consulted the symbol key twice (hash[:host] or
      # hash[:host]), so string keys ('host', 'port', 'category') were
      # silently ignored and the defaults used instead.
      @host = (hash[:host] or hash['host'] or 'localhost')
      @port = (hash[:port] or hash['port'] or '1463')
      @category = (hash[:category] or hash['category'] or 'default')

      @client = Scribe.new("#{@host}:#{@port}", category=@category, add_newlines=false)
    end

    private

    # Ship one formatted message; connectivity problems are reported via
    # Log4r's internal logger rather than raised to the caller.
    def write(data)
      begin
        @client.log(data.strip, @category)
      rescue ScribeThrift::Client::TransportException => e
        Logger.log_internal(-2) {
          "Caught TransportException, is the scribe server alive?"
        }
      rescue ThriftClient::NoServersAvailable => e
        Logger.log_internal(-2) {
          "No scribe servers are available!"
        }
      end
    end
  end
end
# :nodoc:
module Log4r

  # Class-level registry interface for Outputter: look up, register, and
  # iterate over named outputters stored in @@outputters.
  class Outputter < Monitor
    # Retrieve an outputter by name. The special names 'stdout' and 'stderr'
    # are created on demand when not yet registered; anything else unknown
    # yields nil.
    def self.[](name)
      registered = @@outputters[name]
      return registered unless registered.nil?

      case name
      when 'stdout' then StdoutOutputter.new 'stdout'
      when 'stderr' then StderrOutputter.new 'stderr'
      end
    end

    # Convenience accessor for the 'stdout' outputter.
    def self.stdout; Outputter['stdout'] end

    # Convenience accessor for the 'stderr' outputter.
    def self.stderr; Outputter['stderr'] end

    # Register an outputter under a name.
    def self.[]=(name, outputter)
      @@outputters[name] = outputter
    end

    # Yields each outputter's name and reference.
    def self.each
      @@outputters.each { |name, outputter| yield name, outputter }
    end

    # Yields each registered outputter (reference only).
    def self.each_outputter
      @@outputters.each_value { |outputter| yield outputter }
    end
  end
end
# :include: ../rdoc/syslogoutputter
#
# Version:: $Id$
# Author:: Steve Lumos
# Author:: Leon Torres

require 'log4r/formatter/formatter'
require 'log4r/outputter/outputter'
require 'log4r/configurator'
require 'syslog'

module Log4r

  SYSLOGNAMES = Hash.new

  # Outputter that relays log events to the system syslog daemon.
  class SyslogOutputter < Outputter
    include Syslog::Constants

    # maps default log4r levels to syslog priorities (logevents never see ALL and OFF)
    # The syslog priorities used are (note the actual Syslog constant names:
    # there is no LOG_WARN, LOG_ERROR, or LOG_FATAL in Syslog):
    # "DEBUG"  => Syslog::LOG_DEBUG
    # "INFO"   => Syslog::LOG_INFO
    # "NOTICE" => Syslog::LOG_NOTICE
    # "WARN"   => Syslog::LOG_WARNING
    # "ERROR"  => Syslog::LOG_ERR
    # "FATAL"  => Syslog::LOG_CRIT
    # "ALERT"  => Syslog::LOG_ALERT
    # "EMERG"  => Syslog::LOG_EMERG
    SYSLOG_LEVELS_MAP = {
      "DEBUG"  => LOG_DEBUG,
      "INFO"   => LOG_INFO,
      "NOTICE" => LOG_NOTICE, # by default NOTICE is not in log4r
      "WARN"   => LOG_WARNING,
      "ERROR"  => LOG_ERR,
      "FATAL"  => LOG_CRIT,
      "ALERT"  => LOG_ALERT,  # by default ALERT is not in log4r
      "EMERG"  => LOG_EMERG,  # by default EMERG is not in log4r
    }

    # mapping from Log4r default levels to syslog, by string name
    # "DEBUG" => "DEBUG"
    # "INFO"  => "INFO"
    # "WARN"  => "WARN"
    # "ERROR" => "ERROR"
    # "FATAL" => "FATAL"
    SYSLOG_LOG4R_MAP = {
      "DEBUG" => "DEBUG",
      "INFO"  => "INFO",
      "WARN"  => "WARN",
      "ERROR" => "ERROR",
      "FATAL" => "FATAL"
      # "NOTICE" => "INFO", # by default NOTICE is not in log4r
      # "ALERT"  => "FATAL", # by default ALERT is not in log4r
      # "EMERG"  => "FATAL" # by default EMERG is not in log4r
    }

    @levels_map = SYSLOG_LOG4R_MAP

    # There are 3 hash arguments
    #
    # [<tt>:ident</tt>] syslog ident, defaults to _name
    # [<tt>:logopt</tt>] syslog logopt, defaults to LOG_PID | LOG_CONS
    # [<tt>:facility</tt>] syslog facility, defaults to LOG_USER
    def initialize(_name, hash={})
      super(_name, hash)
      @ident = (hash[:ident] or hash['ident'] or _name)
      @logopt = (hash[:logopt] or hash['logopt'] or LOG_PID | LOG_CONS).to_i
      @facility = (hash[:facility] or hash['facility'] or LOG_USER).to_i
      map_levels_by_name_to_syslog()
    end

    def closed?
      @level == OFF
    end

    def close
      @level = OFF
      OutputterFactory.create_methods(self)
      Logger.log_internal {"Outputter '#{@name}' closed Syslog and set to OFF"}
    end

    # A single hash argument that maps custom names to syslog names
    #
    # [<tt>levels_map</tt>] A map that will create a linkage between levels
    #                       in a hash and underlying syslog levels.
    #                       By default, these are direct mapping of the log4r
    #                       levels (e.g. "DEBUG" => "DEBUG")
    #                       If you have defined your own custom levels, you
    #                       should provide this underlying mapping, otherwise
    #                       all messages will be mapped to the underlying syslog
    #                       level of INFO by default.
    #                       e.g.
    #                       You have created custom levels called:
    #                       <tt>Configurator.custom_levels "HIGH", "MEDIUM", "LOW"</tt>
    #                       To map these to 'equivalent' syslog levels, after instantiating
    #                       a syslogoutputter:
    #                       <tt>SyslogOutputter.map_levels_by_name_to_syslog(
    #                         { "HIGH" => "ALERT", "MEDIUM" => "WARN", "LOW" => "INFO" }
    #                       )</tt>
    def map_levels_by_name_to_syslog( lmap = SYSLOG_LOG4R_MAP )
      @levels_map = lmap
    end

    def get_levels_map()
      return @levels_map
    end

    private

    # Translate the event's level to a syslog priority and emit the formatted
    # message through Syslog.
    def canonical_log(logevent)
      # Fix: a hash miss does not raise, it returns nil, so the original
      # 'rescue' fallback never fired for unmapped (custom) levels and nil
      # was passed to Syslog#log. The || fallback implements the documented
      # "unmapped levels go to INFO" behavior; the rescue guards genuine
      # lookup errors (e.g. LNAMES not yet populated).
      pri = begin
        SYSLOG_LEVELS_MAP[@levels_map[LNAMES[logevent.level]]] || LOG_INFO
      rescue StandardError
        LOG_INFO
      end
      o = format(logevent)
      if o.kind_of? Exception then
        msg = "#{o.class} at (#{o.backtrace[0]}): #{o.message}"
      elsif o.respond_to? :to_str then
        msg = o.to_str
      else
        msg = o.inspect
      end

      Syslog.open(@ident, @logopt, @facility) do |s|
        s.log(pri, '%s', msg)
      end
    end
  end
end