splog 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/splog.rb ADDED
@@ -0,0 +1,495 @@
1
lib = File.expand_path('../../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'splog/version'
require 'date'
require 'digest'
require 'enumerator'
require 'json'
require 'mongo'
require 'optparse'
require 'yaml'

include Mongo
12
+
13
+ module Splog
14
+
15
+ class LogParser
16
+ attr_accessor :config, :pattern_name, :options
17
+
18
+ # Define the accessors to mongo, all db writes happen to the configured @coll
19
+ attr_reader :client, :coll
20
+
21
+ def initialize
22
+ # Yaml config options
23
+ @config = {}
24
+
25
+ # Command line options
26
+ @options = {
27
+ :append => true
28
+ }
29
+
30
+ # Defines how each line is split apart with the array of regex
31
+ @pattern_name = nil
32
+ @pattern = nil
33
+
34
+ # Defines how each regex group is mapped to a data type
35
+ @mapping_name = nil
36
+ @mapping = nil
37
+
38
+ # Define the mongo client, nil by default until first persist to log entry
39
+ @client = nil
40
+
41
+ end
42
+
43
+ # http://stackoverflow.com/questions/6461812/creating-an-md5-hash-of-a-number-string-array-or-hash-in-ruby
44
+ def createsig(body)
45
+ Digest::MD5.hexdigest( sigflat body )
46
+ end
47
+
48
+ def sigflat(body)
49
+ if body.class == Hash
50
+ arr = []
51
+ body.each do |key, value|
52
+ arr << "#{sigflat key}=>#{sigflat value}"
53
+ end
54
+ body = arr
55
+ end
56
+ if body.class == Array
57
+ str = ''
58
+ body.map! do |value|
59
+ sigflat value
60
+ end.sort!.each do |value|
61
+ str << value
62
+ end
63
+ end
64
+ if body.class != String
65
+ body = body.to_s << body.class.to_s
66
+ end
67
+ body
68
+ end
69
+
70
+ def persist_log_entry(parsed_line)
71
+ begin
72
+ if @client.nil? and @options[:db_ref_name]
73
+ db_ref_name = @options[:db_ref_name]
74
+ host = @config['db_refs'][db_ref_name]['host'] || '127.0.0.1'
75
+ port = @config['db_refs'][db_ref_name]['port'] || 27107
76
+ user = @config['db_refs'][db_ref_name]['user'] || nil
77
+ pass = @config['db_refs'][db_ref_name]['pass'] || nil
78
+ db = @options[:mongo_db] || @config['db_refs'][db_ref_name]['db']
79
+ coll = @options[:mongo_coll] || @config['db_refs'][db_ref_name]['collection']
80
+
81
+ @client = MongoClient.new(host, port, :pool_size => 1)
82
+ db = @client.db(db)
83
+ auth = nil
84
+ if user and user != '' && pass
85
+ auth = db.authenticate(user, pass)
86
+ #p "Authentication to mongo returned: #{auth}"
87
+ end
88
+ @coll = db[coll]
89
+ end
90
+
91
+ # Assuming the above is successfull write to the collection, otherwise silently do nothing
92
+ if @client and @coll
93
+ # If an _id exists upsert the doc
94
+ if parsed_line.has_key?('_id')
95
+ @coll.update({:_id => parsed_line['_id']}, parsed_line, opts = {:upsert => true})
96
+ # Otherwise insert the parsed_line which will cause a Mongo specific _id to be generated
97
+ else
98
+ @coll.insert(parsed_line)
99
+ end
100
+ end
101
+ rescue => detail
102
+ $stderr.puts $!
103
+ end
104
+ end
105
+
106
+ def load_dot_file
107
+ # yml config
108
+ dot_file = @options[:dot_file_name] || '~/.splog.yml'
109
+ #puts "Loading dot_file from #{dot_file}"
110
+ begin
111
+ prop_list = YAML.load_file(File.expand_path(dot_file))
112
+ prop_list.each do |key, value|
113
+ @config[key] = value
114
+ end
115
+ rescue => detail
116
+ $stderr.puts "Unable to find or read #{dot_file}\n"
117
+ $stderr.puts $!
118
+ exit
119
+ end
120
+ end
121
+
122
+ def set_pattern(options)
123
+ @pattern_name = options[:pattern_name]
124
+ begin
125
+ @pattern = @config[options[:pattern_name]]['regex']
126
+ rescue => detail
127
+ puts "No pattern matching '#{options[:pattern_name]}' found. Please choose another name or define this pattern in the your .splog.yaml"
128
+ exit
129
+ end
130
+ end
131
+
132
+ def set_mapping(options)
133
+ begin
134
+ tmp = {}
135
+ @config[options[:pattern_name]]['mapping'].each { |x| tmp[x['name']] = x } unless @config[options[:pattern_name]]['mapping'].nil?
136
+ @mapping = tmp
137
+ rescue => detail
138
+ puts 'Unable to read the mapping in your .splog.yaml configuration. Please reference https://github.com/engineersamuel/splog for proper formatting.'
139
+ $stderr.puts $!
140
+ exit
141
+ end
142
+ end
143
+
144
+ # Attempt to parse an int or return 0
145
+ def parse_int(the_input)
146
+ output = 0
147
+ begin
148
+ output = the_input.to_i
149
+ rescue => detail
150
+ nil
151
+ end
152
+ output
153
+ end
154
+
155
+ # Attempt to parse a float or return 0
156
+ def parse_float(the_input)
157
+ output = 0
158
+ begin
159
+ output = the_input.to_f
160
+ rescue => detail
161
+ nil
162
+ end
163
+ output
164
+ end
165
+
166
+ # Attempt to parse a datetime or return None
167
+ def parse_datetime(the_input, the_format=nil)
168
+ output = the_input
169
+ begin
170
+ output = the_format ? DateTime.strptime(the_input, the_format) : DateTime.parse(the_input)
171
+ # Convert the time to utc for mongo
172
+ output = output.nil? ? nil : output.to_time.utc
173
+ rescue => detail
174
+ nil
175
+ end
176
+ output
177
+ end
178
+
179
+ def parse_line(line, opts={})
180
+ res = {}
181
+ parts = opts[:parts] || @config[@pattern_name]['regex']
182
+ begin
183
+ #pattern = re.compile(r'\s+'.join(parts)+r'\s*\Z')
184
+ pattern = @config[@pattern_name].has_key?('delim') ? "\\s*#{parts.join(@config[@pattern_name]['delim'])}\\s*" : "\\s*#{parts.join()}\\s*"
185
+ # MULTILINE to match the \n chars
186
+ #Regexp::MULTILINE | Regexp::IGNORECASE
187
+ r = Regexp.new(pattern, Regexp::MULTILINE)
188
+ m = r.match(line)
189
+ res = {}
190
+ if m
191
+ m.names.each do |group_name|
192
+ k = group_name
193
+ v = m[k]
194
+ # print("k: {}, v: {}".format(k, v))
195
+ if @mapping and @mapping.has_key?(k)
196
+ # print("self.mapping[k]: %s" % self.mapping[k])
197
+ if ['Int', 'Integer'].include? @mapping[k]['data_type']
198
+ res[k] = parse_int(m[k])
199
+ elsif ['Float'].include? @mapping[k]['data_type']
200
+ res[k] = parse_float(m[k])
201
+ elsif ['DateTime'].include? @mapping[k]['data_type']
202
+ res[k] = parse_datetime(m[k], @mapping[k]['format'])
203
+ end
204
+ else
205
+ res[k] = v
206
+ end
207
+ end
208
+ end
209
+ rescue => detail
210
+ $stderr.puts $!
211
+ detail.backtrace.each { |e| $stderr.puts e}
212
+ end
213
+
214
+ # If a key exists add the key to the parsed_line, This can help differentiate the log if not putting each
215
+ # Log into a unique collection, or even then helps differentiate the logs within a collection. Ex. if you had
216
+ # access_log and error_log in the same collection you may want a specific key for each of those
217
+ if @options[:key] && res && res.length != 0
218
+ res['key'] = @options[:key]
219
+ end
220
+
221
+ if @options[:md5] && res && res.length != 0
222
+ res['_id'] = createsig(res)
223
+ end
224
+
225
+ # Return nil if the hash hasn't been populated
226
+ res.length == 0 ? nil : res
227
+ end
228
+
229
+ # Takes an enum and iterates over it with logic to parse the log lines based on the configuration
230
+ def parse(enum_ref)
231
+ e = Enumerator.new do |y|
232
+ # Defines the current parsed line. Next linese can be added to this one potentially based on a key
233
+ current_working_line = nil
234
+ parsed_line = nil
235
+ begin
236
+ while enum_ref
237
+ line = enum_ref.next
238
+ parsed_line = parse_line(line)
239
+
240
+ next_line = enum_ref.peek
241
+ # Pass in the 'match_forward_regex' if it exists so the next line can be evaluated in this context
242
+ parsed_next_line = @config[@pattern_name]['match_forward_regex'].nil? ? parse_line(next_line) : parse_line(next_line, {:parts => @config[@pattern_name]['match_forward_regex']})
243
+
244
+ ############################################################################################################
245
+ # If the next line matches the match_forward_regex
246
+ ############################################################################################################
247
+ if parsed_next_line and @config[@pattern_name]['match_forward_regex']
248
+
249
+ # If the current_working_line does not yet exist, set it to the latest parsed line
250
+ if current_working_line.nil? and parsed_line
251
+ current_working_line = parsed_line
252
+ end
253
+
254
+ # Add to the match_forward_keyname_source from the match_forward_keyname_dest
255
+ current_working_line[@config[@pattern_name]['match_forward_keyname_source']] << parsed_next_line[@config[@pattern_name]['match_forward_keyname_source']]
256
+
257
+ # fast forward the enum one click to account for the peek
258
+ enum_ref.next
259
+
260
+ # Read until StopIteration or the match_forward_regex no longer matches
261
+ while true
262
+ # Only peek here to not advance the enum unnecessarily
263
+ sub_line = enum_ref.peek
264
+ parsed_sub_line = @config[@pattern_name]['match_forward_regex'].nil? ? nil : parse_line(sub_line, {:parts => @config[@pattern_name]['match_forward_regex']})
265
+ if parsed_sub_line
266
+ # if matched advance the enum and add the data to the current working line
267
+ enum_ref.next
268
+ current_working_line[@config[@pattern_name]['match_forward_keyname_source']] << parsed_sub_line[@config[@pattern_name]['match_forward_keyname_source']]
269
+ else
270
+ # Otherwise we've reached the end of the matched pattern yield this match out
271
+ y << current_working_line
272
+
273
+ # Since that is yielded, set the current_working_line to nil so it has a fresh start for the next iter
274
+ current_working_line = nil
275
+ break
276
+ end
277
+ end
278
+ ############################################################################################################
279
+ # Otherwise if the next line is nil but the parsed line matched and we are appending
280
+ ############################################################################################################
281
+ elsif parsed_line and parsed_next_line.nil? and @options[:append]
282
+ # If the current_working_line does not yet exist, set it to the latest parsed line
283
+ if current_working_line.nil? and parsed_line
284
+ current_working_line = parsed_line
285
+ end
286
+
287
+ # Read until StopIteration or a new parsed line is found
288
+ while true
289
+ # Only peek here to not advance the enum unnecessarily
290
+ sub_line = enum_ref.peek
291
+ parsed_sub_line = parse_line(sub_line)
292
+ if parsed_sub_line.nil? and @config[@pattern_name]['unmatched_append_key_name']
293
+ # if unmatched advance the enum and add the data to the current working line
294
+ enum_ref.next
295
+ current_working_line[@config[@pattern_name]['unmatched_append_key_name']] << sub_line
296
+ else
297
+ # Otherwise we've reached the end of the matched pattern yield this match out
298
+ y << current_working_line
299
+
300
+ # Since that is yielded, set the current_working_line to nil so it has a fresh start for the next iter
301
+ current_working_line = nil
302
+ break
303
+ end
304
+ end
305
+ ############################################################################################################
306
+ # Otherwise just your average joe matched line
307
+ ############################################################################################################
308
+ elsif parsed_line
309
+ y << parsed_line
310
+ end
311
+ end
312
+ rescue StopIteration => e
313
+ #if both current_working_line and parsed line yield them both as this situation can happen when peeking forward
314
+ # After an unmatched line
315
+ if current_working_line and parsed_line and current_working_line != parsed_line
316
+ y << current_working_line
317
+ y << parsed_line
318
+ # Yield point for a successfully parsed line
319
+ elsif current_working_line
320
+ y << current_working_line
321
+ else
322
+ y << parsed_line
323
+ end
324
+ end
325
+ end
326
+ end
327
+
328
+ def read_input(the_input)
329
+ # Split the input by lines, chomp them, and return an enum
330
+ #the_input.lines.map(&:chomp).to_enum
331
+ the_input.lines.to_enum
332
+ end
333
+
334
+ def read_log_file(file_name)
335
+ File.open(file_name).to_enum
336
+ end
337
+
338
+ def cli(args=nil)
339
+ options = {
340
+ :append => true,
341
+ :output => 'stdout',
342
+ :md5 => true # By defualt md5 the hash as the unique identifier
343
+ }
344
+ opts = OptionParser.new do |parser|
345
+ parser.banner = 'Usage: splog [options]'
346
+
347
+ parser.separator ''
348
+ parser.separator 'Parse logs in arbitrary formats defined in ~/.splog.yml:'
349
+
350
+ parser.on('-p', '--pattern STR', 'Mapping name defined in ~/.splog.yml') do |setting|
351
+ options[:pattern_name] = setting
352
+ end
353
+
354
+ parser.on('-f', '--file PATH', 'File to parse') do |setting|
355
+ options[:file_name] = setting ? File.expand_path(setting) : setting
356
+ end
357
+
358
+ parser.on('-c', '--config PATH', 'Optional dot file path. Defaults to ~/.splog.yml') do |setting|
359
+ options[:dot_file_name] = setting ? File.expand_path(setting) : setting
360
+ end
361
+
362
+ parser.on('-o', '--output [stdout|filename]', 'Defaults to stdout, if specifying just -o then defaults to no standard output.') do |setting|
363
+ options[:output] = setting ? setting : nil
364
+ end
365
+
366
+ parser.on('--no-append', "When a line doesn't match the regex, don't append it to the previously matched line. The default is to append.") do |setting|
367
+ options[:append] = setting.nil?
368
+ end
369
+
370
+ parser.on('-k', '--key STR', 'The unique business key to use as the database id. If none specified an automatic id will be generated.') do |setting|
371
+ options[:key] = setting
372
+ end
373
+
374
+ parser.on('-d', '--database STR', 'Specify a database reference defined in ~/.splog.yml to write to') do |ext|
375
+ options[:db_ref_name] = ext || nil
376
+ end
377
+
378
+ parser.on('--db STR', 'Override the Mongo database defined in ~/.splog.yml') do |ext|
379
+ options[:mongo_db] = ext || nil
380
+ end
381
+
382
+ parser.on('--coll STR', 'Override the Mongo collection defined in ~/.splog.yml') do |ext|
383
+ options[:mongo_coll] = ext || nil
384
+ end
385
+
386
+ parser.on('--[no-]md5', 'When saving to mongo md5 the hash and set that to the _id. This means repeated parses of the same log file should be idempotent. Otherwise there will be duplicated lines in the database.') do |ext|
387
+ p ext
388
+ options[:md5] = ext # if -m then == true
389
+ end
390
+
391
+ parser.on_tail('-h', '--help', '--usage', 'Show this usage message and quit.') do |setting|
392
+ puts parser.help
393
+ exit
394
+ end
395
+
396
+
397
+ #parser.on_tail("-v", "--version", "Show version information about this program and quit.") do
398
+ # puts "Splog v1.0.0"
399
+ # exit
400
+ #end
401
+ end
402
+
403
+ begin
404
+ if args and not args.length == 0
405
+ opts.parse!(args)
406
+ else
407
+ ARGV << '-h' if ARGV.size == 0
408
+ opts.parse!(ARGV)
409
+ end
410
+ rescue OptionParser::ParseError
411
+ $stderr.print "Error: #{$!}\n"
412
+ exit
413
+ end
414
+
415
+ if (options[:file_name] and options[:pattern_name]) or not $stdin.tty?
416
+ @options = options
417
+
418
+ # At this point the options are loaded so load the dot file before continuing so the config can be properly
419
+ # Loaded from the dot file and further options determined
420
+ load_dot_file
421
+
422
+ set_pattern(options)
423
+ set_mapping(options)
424
+
425
+ # Get the enum from the file
426
+ e = nil
427
+ if options[:file_name] and options[:pattern_name]
428
+ e = read_log_file(options[:file_name])
429
+ # Or stdin otherwise
430
+ elsif not $stdin.tty?
431
+ e = $stdin.to_enum
432
+ else
433
+ $stderr.print 'Please either specify a -f FILENAME or pipe content to splog.'
434
+ exit
435
+ end
436
+
437
+ # outputting to stdout simply prints 1 parsed line per line
438
+ if options[:output] == 'stdout'
439
+ # Parse each line of the file through the log parser
440
+ parse(e).each do |parsed_line|
441
+ if options[:db_ref_name]
442
+ persist_log_entry(parsed_line)
443
+ end
444
+
445
+ # Then write to stdout
446
+ $stdout.write parsed_line.to_s
447
+ $stdout.write "\n"
448
+ end
449
+
450
+ # outputting to json will construct a valid json array so you can do something like splog ... | prettyjson
451
+ elsif options[:output] == 'json'
452
+ # Parse each line of the file through the log parser
453
+ $stdout.write '['
454
+ pe = parse(e)
455
+ begin
456
+ while true
457
+ parsed_line = pe.next
458
+
459
+ if options[:db_ref_name]
460
+ persist_log_entry(parsed_line)
461
+ end
462
+
463
+ # Then write to stdout
464
+ $stdout.write parsed_line.to_json
465
+ $stdout.write ',' unless pe.peek.nil?
466
+ end
467
+ rescue => detail
468
+ nil
469
+ end
470
+ # If a \n is not written a % shows on the console output thus breaking the json array
471
+ $stdout.write "]\n"
472
+
473
+ # outputting nothing if -o given with no value. Useful for perf testing mainly
474
+ elsif options[:output] == nil
475
+ pe = parse(e)
476
+ begin
477
+ while true
478
+ parsed_line = pe.next
479
+ if options[:db_ref_name]
480
+ persist_log_entry(parsed_line)
481
+ end
482
+ end
483
+ rescue => detail
484
+ nil
485
+ end
486
+ # Otherwise return the enumerator back up to be iterated over either in testing or in a program requiring this code
487
+ else
488
+ return read_log_file(options[:file_name])
489
+ end
490
+ else
491
+ $stderr.print "Please either specify a -f FILENAME or pipe in content\n"
492
+ end
493
+ end
494
+ end
495
+ end
data/splog.gemspec ADDED
@@ -0,0 +1,29 @@
1
+ # coding: utf-8
2
+ lib = File.expand_path('../lib', __FILE__)
3
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
4
+ require 'splog/version'
5
+
6
+ Gem::Specification.new do |spec|
7
+ spec.name = 'splog'
8
+ spec.version = Splog::VERSION
9
+ spec.authors = ['Samuel Mendenhall']
10
+ spec.email = ['Samuel.Mendenhall@gmail.com']
11
+ spec.description = %q{Parse any log file with yml defined regex rules}
12
+ spec.summary = %q{Parse any log file with yml defined regex rules}
13
+ spec.homepage = 'https://github.com/engineersamuel/splog'
14
+ spec.license = 'MIT'
15
+
16
+ spec.files = `git ls-files`.split($/)
17
+ spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
18
+ spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
19
+ spec.require_paths = ['lib']
20
+
21
+ # Add runtime dependencies
22
+ spec.add_runtime_dependency 'mongo'
23
+ spec.add_runtime_dependency 'bson_ext'
24
+
25
+ # Add development dependencies
26
+ spec.add_development_dependency 'bundler', '~> 1.3'
27
+ spec.add_development_dependency 'rake'
28
+ spec.add_development_dependency 'rspec', '~> 2.6'
29
+ end
@@ -0,0 +1,39 @@
1
+ apache_common:
2
+ delim: '\s+'
3
+ regex:
4
+ - '(?<Host>\S+)' # host %h
5
+ - '(?<Identity>\S+)' # ident %l (unused)
6
+ - '(?<User>\S+)' # user %u
7
+ - '\[(?<Time>.+)\]' # time %t
8
+ - '"(?<Request>.+)"' # request "%r"
9
+ - '(?<Status>[0-9]+)' # status %>s
10
+ - '(?<Size>\S+)' # size %b (careful can be '-')
11
+ - '"(?<Referer>.*)"' # referer "%{Referer}i"
12
+ - '"(?<UserAgent>.*)"' # user agent "%{User-agent}i"
13
+ mapping:
14
+ - name: Time
15
+ data_type: DateTime
16
+ format: '%d/%b/%Y:%H:%M:%S %z'
17
+ - name: Status
18
+ data_type: Integer
19
+ - name: Size
20
+ data_type: Integer
21
+
22
+ #http://httpd.apache.org/docs/2.4/mod/core.html#errorlogformat
23
+ # Apache logs in mixed formats to the error_log. The example is very generic and doesn't parse some more of the
24
+ # Expressive debug logging, but that could easily be done
25
+ apache_error:
26
+ delim: '\s+'
27
+ #http://regexr.com?36mdo
28
+ #[Wed Oct 02 19:24:09 2013] [info] LDAP: SSL support available
29
+ #[Wed Oct 02 19:27:10 2013] [debug] mod_proxy_ajp.c(265): proxy: APR_BUCKET_IS_EOS
30
+ #- '(?<File>.*?:)' # Source file name and line number of the log call
31
+ regex:
32
+ - '\[(?<Date>.+)\]' # time %t -- The first item in the log entry is the date and time of the message
33
+ - '\[(?<Severity>emerg|alert|crit|error|warn|notice|info|debug|trace)\]' # The second item lists the severity of the error being reported
34
+ - '(?<Module>.*?:)' # Name of the file/module logging the message depending on the level
35
+ - '(?<Message>.*)' # The actual log message
36
+ mapping:
37
+ - name: Date
38
+ data_type: DateTime
39
+ format: '%a %b %d %H:%M:%S %Y'
@@ -0,0 +1,50 @@
1
+ 127.0.0.103 - - [03/Oct/2013:12:31:00 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
2
+ 127.0.0.224 - - [03/Oct/2013:12:31:03 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
3
+ 127.0.0.229 - - [03/Oct/2013:12:32:36 -0400] "STATUS / HTTP/1.0" 500 648 "-" "ClusterListener/1.0"
4
+ 127.0.0.229 - - [03/Oct/2013:12:32:46 -0400] "INFO / HTTP/1.0" 200 3403 "-" "ClusterListener/1.0"
5
+ 127.0.0.229 - - [03/Oct/2013:12:32:46 -0400] "CONFIG / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
6
+ 127.0.0.229 - - [03/Oct/2013:12:32:46 -0400] "ENABLE-APP / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
7
+ 127.0.0.228 - - [03/Oct/2013:12:33:23 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
8
+ 127.0.0.120 - - [03/Oct/2013:12:33:30 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
9
+ 127.0.0.226 - - [03/Oct/2013:12:33:59 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
10
+ 127.0.0.126 - - [03/Oct/2013:12:34:00 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
11
+ 127.0.0.224 - - [03/Oct/2013:12:34:26 -0400] "REMOVE-APP / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
12
+ 127.0.0.225 - - [03/Oct/2013:12:34:50 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
13
+ 127.0.0.224 - - [03/Oct/2013:12:35:01 -0400] "INFO / HTTP/1.0" 200 3652 "-" "ClusterListener/1.0"
14
+ 127.0.0.100 - - [03/Oct/2013:12:35:28 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
15
+ 127.0.0.122 - - [03/Oct/2013:12:35:40 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
16
+ 127.0.0.102 - - [03/Oct/2013:12:36:49 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
17
+ 127.0.0.103 - - [03/Oct/2013:12:37:28 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
18
+ 127.0.0.229 - - [03/Oct/2013:12:39:04 -0400] "STATUS / HTTP/1.0" 500 648 "-" "ClusterListener/1.0"
19
+ 127.0.0.229 - - [03/Oct/2013:12:39:14 -0400] "INFO / HTTP/1.0" 200 3317 "-" "ClusterListener/1.0"
20
+ 127.0.0.229 - - [03/Oct/2013:12:39:14 -0400] "CONFIG / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
21
+ 127.0.0.229 - - [03/Oct/2013:12:39:14 -0400] "ENABLE-APP / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
22
+ 127.0.0.1 - - [03/Oct/2013:12:39:39 -0400] "GET /x/ HTTP/1.1" 503 435 "-" "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; .NET CLR 1.0.3705; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; InfoPath.3; MS-RTC LM 8;)"
23
+ 127.0.0.1 - - [03/Oct/2013:12:39:49 -0400] "GET /x/ HTTP/1.1" 503 435 "-" "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; .NET CLR 1.0.3705; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; InfoPath.3; MS-RTC LM 8;)"
24
+ 127.0.0.228 - - [03/Oct/2013:12:39:51 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
25
+ 127.0.0.120 - - [03/Oct/2013:12:39:58 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
26
+ 127.0.0.226 - - [03/Oct/2013:12:40:27 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
27
+ 127.0.0.126 - - [03/Oct/2013:12:40:28 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
28
+ 127.0.0.225 - - [03/Oct/2013:12:41:18 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
29
+ 127.0.0.224 - - [03/Oct/2013:12:41:19 -0400] "CONFIG / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
30
+ 127.0.0.224 - - [03/Oct/2013:12:41:19 -0400] "ENABLE-APP / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
31
+ 127.0.0.100 - - [03/Oct/2013:12:41:56 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
32
+ 127.0.0.122 - - [03/Oct/2013:12:42:08 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
33
+ 127.0.0.224 - - [03/Oct/2013:12:42:42 -0400] "REMOVE-APP / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
34
+ 127.0.0.102 - - [03/Oct/2013:12:43:17 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
35
+ 127.0.0.224 - - [03/Oct/2013:12:43:23 -0400] "INFO / HTTP/1.0" 200 3652 "-" "ClusterListener/1.0"
36
+ 127.0.0.103 - - [03/Oct/2013:12:43:56 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
37
+ 127.0.0.229 - - [03/Oct/2013:12:45:32 -0400] "STATUS / HTTP/1.0" 500 648 "-" "ClusterListener/1.0"
38
+ 127.0.0.229 - - [03/Oct/2013:12:45:42 -0400] "INFO / HTTP/1.0" 200 3317 "-" "ClusterListener/1.0"
39
+ 127.0.0.229 - - [03/Oct/2013:12:45:42 -0400] "CONFIG / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
40
+ 127.0.0.229 - - [03/Oct/2013:12:45:42 -0400] "ENABLE-APP / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
41
+ 127.0.0.228 - - [03/Oct/2013:12:46:19 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
42
+ 127.0.0.120 - - [03/Oct/2013:12:46:26 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
43
+ 127.0.0.226 - - [03/Oct/2013:12:46:55 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
44
+ 127.0.0.126 - - [03/Oct/2013:12:46:56 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
45
+ 127.0.0.225 - - [03/Oct/2013:12:47:46 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
46
+ 127.0.0.100 - - [03/Oct/2013:12:48:24 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
47
+ 127.0.0.122 - - [03/Oct/2013:12:48:36 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
48
+ 127.0.0.224 - - [03/Oct/2013:12:49:41 -0400] "CONFIG / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
49
+ 127.0.0.224 - - [03/Oct/2013:12:49:41 -0400] "ENABLE-APP / HTTP/1.0" 200 - "-" "ClusterListener/1.0"
50
+ 127.0.0.102 - - [03/Oct/2013:12:49:45 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
@@ -0,0 +1,15 @@
1
+ [Wed Oct 02 19:24:09 2013] [info] APR LDAP: Built with OpenLDAP LDAP SDK
2
+ [Wed Oct 02 19:24:09 2013] [info] LDAP: SSL support available
3
+ [Wed Oct 02 19:24:24 2013] [debug] mod_proxy_cluster.c(386): Created: worker for ajp://10.218.6.229:8009 1 (status): 1
4
+ [Wed Oct 02 19:24:24 2013] [debug] mod_proxy_cluster.c(609): update_workers_node starting
5
+ [Wed Oct 02 19:27:10 2013] [debug] ajp_header.c(290): ajp_marshal_into_msgb: Header[25] [sm_email] = []
6
+ [Wed Oct 02 19:27:10 2013] [debug] ajp_header.c(290): ajp_marshal_into_msgb: Header[26] [sm_full_name] = [Joe Smith]
7
+ [Wed Oct 02 19:27:10 2013] [debug] ajp_header.c(290): ajp_marshal_into_msgb: Header[27] [X-Forwarded-For] = [127.0.0.1]
8
+ [Wed Oct 02 19:27:10 2013] [debug] ajp_header.c(290): ajp_marshal_into_msgb: Header[28] [X-Forwarded-Host] = [example.com]
9
+ [Wed Oct 02 19:27:10 2013] [debug] ajp_header.c(290): ajp_marshal_into_msgb: Header[29] [X-Forwarded-Server] = [localhost]
10
+ [Wed Oct 02 19:27:10 2013] [debug] ajp_header.c(290): ajp_marshal_into_msgb: Header[30] [Connection] = [Keep-Alive]
11
+ [Wed Oct 02 19:27:10 2013] [debug] ajp_header.c(450): ajp_marshal_into_msgb: Done
12
+ [Wed Oct 02 19:27:10 2013] [debug] mod_proxy_ajp.c(265): proxy: APR_BUCKET_IS_EOS
13
+ [Wed Oct 02 19:27:10 2013] [debug] mod_proxy_ajp.c(270): proxy: data to read (max 8186 at 4)
14
+ unparsed line
15
+ [Wed Oct 02 19:27:10 2013] [debug] mod_proxy_ajp.c(285): proxy: got 0 bytes of data
@@ -0,0 +1,2 @@
1
+ 127.0.0.103 - - [03/Oct/2013:12:31:00 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"
2
+ 127.0.0.224 - - [03/Oct/2013:12:31:03 -0400] "STATUS / HTTP/1.0" 200 86 "-" "ClusterListener/1.0"