log2json 0.1.23 → 0.1.24

@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: e3e168378db61e52503c7a5642fe792573beffed
+   data.tar.gz: c60bb1c1af3213c9f6c2cd049060da0aabb88aa0
+ SHA512:
+   metadata.gz: 0fc5c89b98cbff2bfcb8ebb6d2af702e3be88fa09e4c8e655ef700d34907ef57867ccdbb6f46f4a7bde140bdd82fd6c1df314d10341942ddcae5d29cbccb2396
+   data.tar.gz: 2ca8e93b7e69d28fa69c05ca8706133eef204e12e5703300f7c690916d01cff925ff2b47812c416bdc0c99cc33dbe0e25b90c1e72d3de886bde4cd83d5f3363a
@@ -10,9 +10,9 @@ set -e
  # it will also print "==> file.name <== [event]" to stdout whenever a file truncation or a new file is
  # detected. [event] will be one of "[new_file]" or "[truncated]".
  #
- # - It allows the use of multiple -n options. Each -n will apply to the files specified on the
- #   command line in order.(ie, first -n N corresponds to the first file, etc.)
- #   If there are more files listed than the number of -n options, then the last -n applies to the
+ # - It allows the use of multiple -n options. Each -n+N will apply to the files specified on the
+ #   command line in order.(ie, first -n+N corresponds to the first file, etc.)
+ #   If there are more files listed than the number of -n+N options, then the last -n+N applies to the
  #   rest of the files.
  #
  TAIL=$(
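A note on the -n+N form used above: the first half of this sketch is standard GNU tail behavior; the per-file invocation is hypothetical and only illustrates the mapping described in the changed comment (the script name tail-log.sh is taken from the comments further down, and the log file names are made up).

    # GNU tail: "-n +N" starts output at line N (1-based) instead of
    # printing only the last N lines.
    printf 'a\nb\nc\nd\n' > demo.log   # throwaway example file
    tail -n +3 demo.log                # prints: c, d

    # Illustrative use of this script's per-file extension:
    #   ./tail-log.sh -n+100 app.log -n+1 worker.log
    # app.log would be tailed starting at line 100, worker.log from line 1;
    # any extra files beyond the last -n+N reuse that last value.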
@@ -35,6 +35,8 @@ IFS=$OIFS
  SINCEDB_DIR=${SINCEDB_DIR:-~/.tail-log}
  mkdir -p "$SINCEDB_DIR" || true
 
+ min() { if [[ $1 -lt $2 ]]; then echo $1; else echo $2; fi; }
+
  # Helper to build the arguments to tail.
  # Specifically, we expect the use of GNU tail as found in GNU coreutils.
  # It allows us to follow(with -F) files across rotations or truncations.
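The dependence on GNU tail's -F is worth spelling out; a minimal illustration (file name is made up) of why -F rather than -f is needed here:

    tail -F app.log &             # -F follows the file *name*, retrying as needed
    mv app.log app.log.1          # simulate a logrotate-style rotation
    echo "after rotate" > app.log
    # tail -F notices that app.log reappeared and keeps printing from the new
    # file, whereas plain -f would stay attached to the rotated (old) inode.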
@@ -52,13 +54,11 @@ build_tail_args() {
      read line < "$sincedb_path"
      t=($line)
      # if inode number is unchanged and the current file size is not smaller
-     # then we start tailing from 1 + the line number recorded in the sincedb.
+     # then we start tailing from the number of lines read(t[2]), as recorded in the sincedb,
+     # or from the last line, whichever is smaller, +1.
      if [[ ${t[0]} == $(stat -c "%i" "$fpath") &&
-           ${t[1]} -le $(stat -c "%s" "$fpath") &&
-           ${t[2]} -le "$nlines" ]]; then
-         TAIL_ARGS[$((i++))]="-n+$((t[2] + 1))"
-         # tail -n+N means start tailing from the N-th line of the file
-         # and we're even allowed to specify different -n+N for different files!
+           ${t[1]} -le $(stat -c "%s" "$fpath") ]]; then
+         TAIL_ARGS[$((i++))]="-n+$(min $((t[2]+1)) $nlines)"
          TAIL_ARGS[$((i++))]=$fpath
          continue
      fi
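To make the resume logic concrete, here is a hedged sketch of a sincedb entry and the tail argument it produces; the path and numbers are illustrative, but the three fields follow the inode/size/lines-read layout written by the Ruby tracker below.

    # Fields: <inode> <file size in bytes> <lines read so far>
    cat "$SINCEDB_DIR/app.log.since"
    # 917530 10240 57

    # If the inode still matches and the file has not shrunk, the argument built
    # above is "-n+$(min 58 $nlines)": resume at line 58 (one past the 57 lines
    # already read), but never beyond the file's current last line.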
@@ -27,33 +27,72 @@ def commit
    return if @fmap.nil?
    @fmap.each do |fpath, t|
      sincedb_path = "#{@sincedb_dir}/#{fpath}.since"
+     begin
+       fstat = File.stat(fpath)
+       t[1] = fstat.size
+     rescue
+       # empty
+     end
      FileUtils.mkdir_p(File.dirname(sincedb_path))
      IO.write(sincedb_path, "#{t.join(' ')}\n")
    end
  end
  at_exit(&method(:commit))
 
+
+ last_line = nil # the previous non-header output line from tail.
+ fpath = nil
  # Note: You probably want to set the locale env var: LC_CTYPE=en_US.UTF-8
  while line = gets()
    if line =~ /^==> (.+) <==(?: \[(.+)\])?$/
-     fpath = $1
-     if @fmap[fpath].nil? or $2 =~ /^new_file$|^truncated$/
+
+     last_fpath = fpath
+     fpath = $1; event = $2
+     if @fmap[fpath].nil? or event =~ /^new_file$|^truncated$/
        fstat = File.stat(fpath)
        @fmap[fpath] = [fstat.ino, fstat.size, 0]
      end
-     if line =~ /<==$/
-       STDOUT.write(line); STDOUT.flush()
-       # Note: we don't print those '==> ... <== [....]' header lines that
-       #       got added by our patch to tail because they might not be followed
-       #       by lines from the indicating files.(eg, when a new empty file appears)
+
+     # When tracking multiple files, each output of a '==> ... <==' line actually
+     # prints an empty line first, so that has to be taken into account.
+     if not last_fpath.nil? and event.nil? \
+        and last_line == "\n"
+       # NOTE: also check last_line because it's possible to have this at the beginning:
+       #   ==> afile.txt <== [new_file]
+       #   ==> afile.txt <==
+       #   ...first line of log...
+       #
+       last_stat = @fmap[last_fpath]
+       last_stat[2] -= 1 if not last_stat.nil?
      end
+     # FIXME: it might be possible, although pretty rare, that we got interrupted
+     #        before we had the chance to adjust the -1 to the number of lines read.
+     #        In such a case, we'll record one more than the number of lines read!
+     #        This might lead to skipping one log line by tail-log.sh.
+
+     # NOTE: regardless of the kinds of header lines, we always output them because of this
+     #       special case:
+     #
+     #         echo 11111 >> file1.txt; echo 22222 >> file2.txt
+     #         tail -vF file1.txt file2.txt
+     #         rm file1.txt file2.txt
+     #         echo 222222222 >> file2.txt
+     #         echo 111111111 >> file1.txt
+     #
+     #       the last 111111111 won't have a '==> file1.txt <==' header line before it but only
+     #       our '==> file1.txt <== [new_file]' header!
+     #
+     STDOUT.write(line); STDOUT.flush()
      next
    end
+
    STDOUT.write(line); STDOUT.flush()
    @fmap[fpath][2] += 1
    # Note: In the case of interruption, there's a chance that the line count is
    #       one line behind the number of log lines written to stdout. This is
    #       OK since we'd rather output a duplicate log line rather than miss
    #       one.
+   #
+   last_line = line
  end
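The empty-line bookkeeping above is easier to see against real tail output. An illustrative session (throwaway file names, GNU tail assumed) shows that every '==> file <==' header after the first is preceded by a blank line, which is exactly the "\n" line the code subtracts from the previous file's count:

    printf '1\n2\n' > a.log; printf 'x\n' > b.log
    tail -n +1 a.log b.log
    # ==> a.log <==
    # 1
    # 2
    #
    # ==> b.log <==
    # x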
 
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
    s.name = 'log2json'
-   s.version = '0.1.23'
+   s.version = '0.1.24'
    s.summary = "Read, filter and ship logs. ie, poor man's roll-your-own, light-weight logstash replacement."
    s.description = IO.read(File.join(File.dirname(__FILE__), 'README'))
    s.authors = ['Jack Kuan']
metadata CHANGED
@@ -1,20 +1,18 @@
  --- !ruby/object:Gem::Specification
  name: log2json
  version: !ruby/object:Gem::Version
-   version: 0.1.23
-   prerelease:
+   version: 0.1.24
  platform: ruby
  authors:
  - Jack Kuan
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-03-03 00:00:00.000000000 Z
+ date: 2014-06-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: jls-grok
    requirement: !ruby/object:Gem::Requirement
-     none: false
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
@@ -22,7 +20,6 @@ dependencies:
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
-     none: false
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
@@ -30,7 +27,6 @@ dependencies:
  - !ruby/object:Gem::Dependency
    name: redis
    requirement: !ruby/object:Gem::Requirement
-     none: false
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
@@ -38,7 +34,6 @@ dependencies:
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
-     none: false
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
@@ -46,7 +41,6 @@ dependencies:
  - !ruby/object:Gem::Dependency
    name: persistent_http
    requirement: !ruby/object:Gem::Requirement
-     none: false
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
@@ -54,12 +48,11 @@ dependencies:
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
-     none: false
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
          version: 1.0.6
- description: ! "Log2json lets you read, filter and send logs as JSON objects via Unix
+ description: "Log2json lets you read, filter and send logs as JSON objects via Unix
    pipes.\nIt is inspired by Logstash, and is meant to be compatible with it at the
    JSON\nevent/record level so that it can easily work with Kibana. \n\nReading logs
    is done via a shell script(eg, `tail`) running in its own process.\nYou then configure(see
@@ -133,27 +126,26 @@ files:
  - src/tail.c
  homepage:
  licenses: []
+ metadata: {}
  post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
-   none: false
    requirements:
-   - - ! '>='
+   - - '>='
      - !ruby/object:Gem::Version
        version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
-   none: false
    requirements:
-   - - ! '>='
+   - - '>='
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 1.8.23
+ rubygems_version: 2.3.0
  signing_key:
- specification_version: 3
+ specification_version: 4
  summary: Read, filter and ship logs. ie, poor man's roll-your-own, light-weight logstash
    replacement.
  test_files: []