anschel 0.6.5 → 0.7.0

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: bbe6eb2c7595931308ffa51a7c5c246d70bc0bd2
-   data.tar.gz: c982d14ef5c18395f19dea1803875ee81dce3377
+   metadata.gz: 4d2323c2b9351e3241a9f57d0a0d758b1a191748
+   data.tar.gz: 121cbb082eacb6052e330c009ee7e8ca2a553bf0
  SHA512:
-   metadata.gz: a7664952bdf55874f8f883dead99b346034d565462d11e37246ce9437f0d730721eb1726333e492731f159bae02779c5cdbbeaeea80655135fe51ef7a49b6841
-   data.tar.gz: 9dccba8d5cd9cef7ce7e841c95a224807a297091503a9eef98787e75f9d22fbd751149638bca0e0af93ac6d4b4d2f77bb23ec37a0186bfc389e10e9f5bd2f083
+   metadata.gz: 22726febef094ca0aed6f6e2f88d0b101835c165049207b5c0b7c48b1a54f6114158b25db51d8fe7424d4b1d0481bf7cd0f88b33bd3917c7ce519c9ef0db6ca2
+   data.tar.gz: 5e8e436502922f62657918e5c5ae546e12869866e4dcce25a56aa753ea4c51a2319b38909ccb2093575e0dd11ce37752279eee15a283f94e3ce54dd43d90d9ae
data/VERSION CHANGED
@@ -1 +1 @@
- 0.6.5
+ 0.7.0
@@ -6,7 +6,7 @@
  # }
  module Anschel
    class Filter
-     def convert conf, log
+     def convert conf, stats, log
        field = conf.delete :field
        type = conf.delete :type

@@ -21,12 +21,22 @@ module Anschel
          'string' => :to_s
        }

+       stats.create 'filter-convert'
+       stats.get 'filter-convert'
+       stats.create 'filter-convert-skipped'
+       stats.get 'filter-convert-skippped'
+
        log.trace event: 'filter-compiled', kind: 'convert', \
          field: field, type: type

        lambda do |event|
-         return event unless event.has_key? field
+         unless event.has_key? field
+           stats.inc 'filter-convert-skipped'
+           return event
+         end
+
          event[field] = event[field].send type_conversions[type]
+         stats.inc 'filter-convert'
          filtered event, conf
        end
      end
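Every filter builder now takes a stats handle, registers its counters up front (filter-convert and filter-convert-skipped here), and bumps one of them per event inside the compiled lambda. Anschel's actual Stats class lives in data/lib/anschel/stats.rb and is only partially visible in this diff, so the following is a minimal sketch, not the gem's implementation, of a counter registry that would satisfy the create/get/inc calls used above:

    # Hypothetical stand-in for Anschel's stats object, shown only to make the
    # create/get/inc contract concrete; names and defaults are assumptions.
    class SketchStats
      def initialize
        @mutex    = Mutex.new
        @counters = {}
      end

      def create name, default = 0
        @mutex.synchronize { @counters[name] ||= default }
      end

      def get name
        @mutex.synchronize { @counters.fetch(name) }
      end

      def inc name, delta = 1
        @mutex.synchronize { @counters[name] += delta }
      end
    end

    stats = SketchStats.new
    stats.create 'filter-convert'
    stats.inc 'filter-convert'
    stats.get 'filter-convert'    # => 1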
@@ -7,7 +7,7 @@
  # }
  module Anschel
    class Filter
-     def gsub conf, log
+     def gsub conf, stats, log
        field = conf.delete :field
        match = Regexp.new conf.delete(:match)
        replace = conf.delete :replace
@@ -18,13 +18,22 @@ module Anschel

        field = field.to_sym

+       stats.create 'filter-gsub'
+       stats.get 'filter-gsub'
+       stats.create 'filter-gsub-skipped'
+       stats.get 'filter-gsub-skipped'

        log.trace event: 'filter-compiled', kind: 'gsub', \
          field: field, match: match, replace: replace

        lambda do |event|
-         return event unless event.has_key? field
+         unless event.has_key? field
+           stats.inc 'filter-gsub-skipped'
+           return event
+         end
+
          event[field].gsub! match, replace
+         stats.inc 'filter-gsub'
          filtered event, conf
        end
      end
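The compiled gsub filter rewrites the named field in place with String#gsub!; events without the field are now counted as skipped instead of silently passed through. A small usage sketch of what the lambda does (the field name and pattern are made up):

    # Hypothetical event and config, mirroring the body of the compiled lambda.
    event   = { message: 'user=alice user=bob' }
    match   = Regexp.new 'user='
    replace = 'u:'

    event[:message].gsub! match, replace   # returns nil when nothing matched
    event                                  # => { message: "u:alice u:bob" }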
@@ -8,7 +8,7 @@
  # }
  module Anschel
    class Filter
-     def index conf, log
+     def index conf, stats, log
        stamp = conf.delete(:stamp) || '@timestamp'
        prefix = conf.delete(:prefix) || 'logs-%{type}-'
        suffix = conf.delete(:suffix) || '%Y.%m.%d'
@@ -22,30 +22,48 @@ module Anschel
        joda = joda.withDefaultYear(Time.new.year)
        joda = joda.withOffsetParsed

+       stats.create 'filter-index'
+       stats.get 'filter-index'
+       stats.create 'filter-index-skipped'
+       stats.get 'filter-index-skipped'
+       stats.create 'filter-index-error'
+       stats.get 'filter-index-error'

        log.trace event: 'filter-compiled', kind: 'index', \
          stamp: stamp, prefix: prefix, suffix: suffix, format: format

        lambda do |event|
-         return event unless event.has_key? stamp
+         unless event.has_key? stamp
+           stats.inc 'filter-index-skipped'
+           return event
+         end
+
          idx_prefix = prefix % event
+
          begin
            millis = joda.parseMillis event[stamp]
            idx_suffix = Time.at(0.001 * millis).strftime(suffix)
            event[:_index] = idx_prefix + idx_suffix
+           stats.inc 'filter-index'
            filtered event, conf
          rescue java.lang.IllegalArgumentException => e
            event[:_index] = idx_prefix + Time.now.strftime(suffix)
-           log.warn \
-             event: 'filter-index-warning',
+           log.trace \
+             event: 'filter-index-error',
              reason: 'could not parse event',
              remediation: 'added bogus index',
              remediation: "sending to best-guess index '#{event[:_index]}'",
+             stamp: stamp,
+             prefix: prefix,
+             suffix: suffix,
+             format: format,
              raw_event: event
            if error_tag
              event[:tags] ||= []
              event[:tags] << error_tag
            end
+           stats.inc 'filter-index-error'
+           stats.inc 'filter-index'
            filtered event, conf
          end
        end
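The index filter builds the Elasticsearch index name from a prefix interpolated against the event (Ruby's %{...} format-with-hash) plus a strftime suffix derived from the parsed timestamp; parse failures now fall back to the current time, log the full filter config at trace level, and bump filter-index-error. A sketch of the happy path, with a made-up event:

    event  = { type: 'syslog' }                            # hypothetical event
    prefix = 'logs-%{type}-' % event                       # => "logs-syslog-"
    suffix = Time.utc(2015, 10, 14).strftime('%Y.%m.%d')   # => "2015.10.14"
    prefix + suffix                                        # => "logs-syslog-2015.10.14"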
@@ -6,7 +6,7 @@
  # }
  module Anschel
    class Filter
-     def parse conf, log
+     def parse conf, stats, log
        field = conf.delete :field
        pattern = Regexp.new conf.delete(:pattern)

@@ -15,25 +15,36 @@ module Anschel

        field = field.to_sym

+       stats.create 'filter-parse'
+       stats.get 'filter-parse'
+       stats.create 'filter-parse-skipped'
+       stats.get 'filter-parse-skipped'
+       stats.create 'filter-parse-error'
+       stats.get 'filter-parse-error'

        log.trace event: 'filter-compiled', kind: 'parse', \
          field: field, pattern: pattern

        lambda do |event|
-         return event unless event.has_key? field
+         unless event.has_key? field
+           stats.inc 'filter-parse-skipped'
+           return event
+         end
          mdata = pattern.match event[field]
          if mdata.nil?
-           log.error \
+           log.trace \
              event: 'parse-filter-error',
              reason: 'regexp did not match',
              field: field,
              pattern: pattern,
              raw_event: event
-           return filtered(event, conf)
+           stats.inc 'filter-parse-error'
+           return event
          end
          mdata.names.each do |group|
            event[group.to_sym] = mdata[group]
          end
+         stats.inc 'filter-parse'
          filtered event, conf
        end
      end
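The parse filter copies every named capture group from the regexp match into the event; on a non-match it now returns the event unmodified (previously it still passed the remaining conf through filtered) and bumps filter-parse-error. A sketch of the capture-group mechanics with a made-up pattern:

    pattern = Regexp.new '(?<verb>\w+) (?<path>\S+)'       # hypothetical pattern
    event   = { message: 'GET /health' }

    mdata = pattern.match event[:message]
    mdata.names                                            # => ["verb", "path"]
    mdata.names.each { |group| event[group.to_sym] = mdata[group] }
    event   # => { message: "GET /health", verb: "GET", path: "/health" }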
@@ -7,7 +7,7 @@
  # }
  module Anschel
    class Filter
-     def scan conf, log
+     def scan conf, stats, log
        field = conf.delete :field
        pattern = Regexp.new conf.delete(:pattern)
        target = conf.delete :target
@@ -19,19 +19,44 @@ module Anschel
        field = field.to_sym
        target = target.to_sym

+       stats.create 'filter-scan'
+       stats.get 'filter-scan'
+       stats.create 'filter-scan-skipped'
+       stats.get 'filter-scan-skipped'
+       stats.create 'filter-scan-nomatch'
+       stats.get 'filter-scan-nomatch'
+       stats.create 'filter-scan-error'
+       stats.get 'filter-scan-error'

        log.trace event: 'filter-compiled', kind: 'scan', \
          field: field, pattern: pattern, target: target

        lambda do |event|
-         return event unless event.has_key? field
-         results = event[field].scan(pattern).flatten.uniq
+         unless event.has_key? field
+           stats.inc 'filter-scan-skipped'
+           return event
+         end
+         begin
+           results = event[field].scan(pattern).flatten.uniq
+         rescue StandardError
+           log.trace \
+             event: 'scan-filter-error',
+             reason: 'could not scan event',
+             field: field,
+             pattern: pattern,
+             target: target,
+             raw_event: event
+           stats.inc 'filter-scan-error'
+           return event
+         end

          if results.empty?
+           stats.inc 'filter-scan-nomatch'
            event
          else
            event[target] ||= []
            event[target] += results
+           stats.inc 'filter-scan'
            filtered event, conf
          end
        end
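The scan filter collects every capture produced by String#scan, de-duplicates the results, and appends them to the target field; the new begin/rescue guards fields whose values do not respond to #scan and counts those as filter-scan-error. A sketch of why flatten.uniq is needed (pattern and text are made up):

    pattern = /#(\w+)/                                 # hypothetical pattern with one group
    text    = 'release #123 fixes #123 and #456'

    text.scan(pattern)                                 # => [["123"], ["123"], ["456"]]
    text.scan(pattern).flatten.uniq                    # => ["123", "456"]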
@@ -8,7 +8,7 @@
  # }
  module Anschel
    class Filter
-     def stamp conf, log
+     def stamp conf, stats, log
        utc = conf.delete :utc?
        field = conf.delete :field
        pattern = conf.delete :pattern
@@ -32,31 +32,46 @@ module Anschel

        offset_s = utc ? Time.zone_offset(Time.now.zone).to_f : 0.0

+       stats.create 'filter-stamp'
+       stats.get 'filter-stamp'
+       stats.create 'filter-stamp-skipped'
+       stats.get 'filter-stamp-skipped'
+       stats.create 'filter-stamp-error'
+       stats.get 'filter-stamp-error'

        log.trace event: 'filter-compiled', kind: 'stamp', \
          utc?: utc, field: field, pattern: pattern, target: target

        lambda do |event|
-         return event unless event.has_key? field
+         unless event.has_key? field
+           stats.inc 'filter-stamp-skipped'
+           return event
+         end
          parsers.each do |joda|
            begin
              millis = joda.parseMillis event[field]
              event[target] = Time.at(0.001 * millis + offset_s).iso8601(3)
+             stats.inc 'filter-stamp'
              return filtered(event, conf)
            rescue
            end
          end

-         log.warn \
-           event: 'stamp-filter-warning',
+         log.trace \
+           event: 'stamp-filter-error',
            reason: 'could not parse event',
            remediation: 'using current time for stamp',
+           utc?: utc,
+           field: field,
+           pattern: pattern,
+           target: target,
            raw_event: event
          if error_tag
            event[:tags] ||= []
            event[:tags] << error_tag
          end
          event[target] = Time.now.utc.iso8601(3)
+         stats.inc 'filter-stamp-error'
          filtered event, conf
        end
      end
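The stamp filter tries each compiled Joda parser in turn, converts the first successful parse from epoch milliseconds to an ISO 8601 string with millisecond precision, and only falls back to the current time (now counted as filter-stamp-error and logged at trace level) when every parser raises. A sketch of the conversion step; the millisecond value is made up, and .utc is added here only to make the output deterministic:

    require 'time'                                  # for Time#iso8601

    millis   = 1_444_824_896_000                    # e.g. what Joda's parseMillis returns
    offset_s = 0.0                                  # non-zero only when the :utc? option is set

    Time.at(0.001 * millis + offset_s).utc.iso8601(3)
    # => "2015-10-14T12:14:56.000Z"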
@@ -20,7 +20,7 @@ module Anschel
        filter_defns.each do |filter_defn|
          filter_type = filter_defn.keys.first
          filter_conf = filter_defn[filter_type]
-         @filters[event_type] << self.send(filter_type, filter_conf, log)
+         @filters[event_type] << self.send(filter_type, filter_conf, stats, log)
        end
      end
      log.info event: 'filter-fully-loaded'
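This is the loader that compiles each configured filter: the definition's single key names the builder method and self.send dispatches to it, which is why every builder above had to grow the extra stats argument. A sketch of the dispatch with a made-up definition:

    filter_defn = { gsub: { field: 'message', match: '\d+', replace: 'N' } }   # hypothetical

    filter_type = filter_defn.keys.first      # => :gsub
    filter_conf = filter_defn[filter_type]    # => { field: 'message', ... }
    # self.send(filter_type, filter_conf, stats, log) then returns the compiled
    # lambda that gets appended to @filters[event_type].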
@@ -7,6 +7,8 @@ module Anschel
    class Input
      class RabbitMQ < Base
        def initialize output, config, stats, log
+         connection_defaults = {}
+
          exchange_defaults = {
            type: 'x-consistent-hash',
            durable: true
@@ -23,25 +25,31 @@ module Anschel
            ack: true
          }

+         connection = ::MarchHare.connect \
+           connection_defaults.merge(config[:connection] || {})
+
          exchange_name = config[:exchange].delete(:name)

          @threads = config[:queues].map do |queue_name, queue_config|
            Thread.new do
-             connection = ::MarchHare.connect config[:connection]
-
              channel = connection.create_channel

              exchange = channel.exchange exchange_name, \
                exchange_defaults.merge(config[:exchange])

+             subscription = subscription_defaults.merge \
+               (config[:subscription] || {})
+
              queue = channel.queue queue_name.to_s, \
                queue_defaults.merge(queue_config)

-             subscription = subscription_defaults.merge \
-               (config[:subscription] || {})
+             log.debug \
+               event: 'input-rabbitmq-connecting-queue',
+               queue: queue_name

              queue.subscribe(subscription) do |meta, message|
                output << message
+               stats.inc 'input'
                channel.ack meta.delivery_tag, false if subscription[:ack]
              end
            end
@@ -50,7 +58,7 @@ module Anschel

        def stop
          return if @stopped
-         @threads.map &:kill
+         @threads.map(&:kill)
          @stopped = true
        end
      end
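The RabbitMQ input now opens a single MarchHare connection up front and gives each consumer thread its own channel, instead of opening one connection per queue inside each thread; RabbitMQ clients generally expect the connection to be shared and channels to stay thread-local, which is what this layout provides. A minimal sketch of the same shape, with a made-up host, made-up queue names, and options mirroring the ack: true default visible in the diff:

    require 'march_hare'

    connection = MarchHare.connect host: 'localhost'       # one shared connection

    threads = %w[ logs-a logs-b ].map do |queue_name|      # hypothetical queue names
      Thread.new do
        channel = connection.create_channel                # one channel per thread
        queue   = channel.queue queue_name, durable: true

        queue.subscribe(ack: true) do |meta, message|
          # hand the message to the output pipeline, then acknowledge it
          channel.ack meta.delivery_tag, false
        end
      end
    end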
data/lib/anschel/input.rb CHANGED
@@ -8,7 +8,7 @@ module Anschel
        log.info event: 'output-loading'
        log.debug event: 'output-config', config: config, qsize: qsize

-       @queue = SizedQueue.new qsize || 2000
+       @queue = SizedQueue.new(qsize || 2000)

        Thread.new do
          leftovers ||= []
@@ -18,6 +18,9 @@ module Anschel

        @inputs = []

+       stats.create 'input'
+       stats.get 'input'
+
        config.each do |input|
          case input.delete(:kind)
          when 'kafka'
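The shared event queue is a SizedQueue, so a push blocks once qsize (default 2000) items are waiting; slow outputs therefore apply backpressure to the inputs rather than letting memory grow without bound. A small self-contained sketch of that blocking behaviour:

    queue    = SizedQueue.new(2)
    producer = Thread.new { 5.times { |i| queue << i } }   # blocks after two unconsumed pushes

    sleep 0.1                                              # give the producer time to fill the queue
    3.times { queue.pop }                                  # each pop unblocks one pending push

    producer.join
    queue.size                                             # => 2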
data/lib/anschel/main.rb CHANGED
@@ -85,7 +85,7 @@ module Anschel
          exception: e.inspect,
          class: e.class,
          message: e.message,
-         backtrace: e.backtrace.join("\n")
+         backtrace: e.backtrace
        bye output, input, store, log, :error
        exit 2
      end
@@ -42,7 +42,11 @@ module Anschel
      # Construct a Logger given the command-line options
      def log
        return @logger if defined? @logger
-       @logger = Slog.new out: (options.log || $stdout), prettify: false
+       device = options.log || $stdout
+       colorize, prettify = false, false
+       colorize, prettify = true, true if device.tty? rescue false
+       @logger = Slog.new \
+         out: device, colorize: colorize, prettify: prettify
        @logger.level = :debug if options.debug?
        @logger.level = :trace if options.trace?
        @logger
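Colorized, prettified log output is now enabled only when the log device is an interactive terminal; the trailing rescue presumably covers log destinations that do not respond to tty? at all. A tiny sketch of the check:

    device = $stdout
    device.tty?                                  # => true in an interactive session, false when piped

    ('/var/log/anschel.log'.tty? rescue false)   # => false; String has no tty?, the rescue eats the NoMethodError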
@@ -50,6 +50,7 @@ module Anschel
            end

            client.bulk body: body
+           stats.inc 'output', body.size
          end
        end
      end
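The Elasticsearch output now counts every flushed bulk action: elasticsearch-ruby's client.bulk takes an array of action hashes, so body.size is the number of actions (one per event when each entry is a single index action). How anschel assembles body is not visible in this hunk; the shape below is only a sketch of the conventional elasticsearch-ruby bulk format with made-up index names and documents:

    body = [
      { index: { _index: 'logs-syslog-2015.10.14', data: { message: 'hello' } } },
      { index: { _index: 'logs-syslog-2015.10.14', data: { message: 'world' } } }
    ]

    # client.bulk body: body   # one HTTP request for both documents
    body.size                  # => 2, the amount added to the 'output' counter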
@@ -10,6 +10,9 @@ module Anschel
 
        @outputs = []

+       stats.create 'output'
+       stats.get 'output'
+
        config.each do |output|
          case output.delete(:kind)
          when 'device'
data/lib/anschel/stats.rb CHANGED
@@ -78,7 +78,7 @@ module Anschel
          stats[k][:val] = stats[k].default
          [
            [ k, v ],
-           [ "#{k}_rate", 1.0 * v / @interval ]
+           [ "#{k}-rate", 1.0 * v / @interval ]
          ]
        end
      end
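Renaming the derived counter from "#{k}_rate" to "#{k}-rate" keeps the reported keys consistently dash-separated, matching counter names like filter-convert-skipped. The rate itself is the counter's delta over the reporting interval; for example, with the hypothetical values below:

    interval = 60      # hypothetical reporting interval, in seconds
    v        = 120     # how much the counter advanced since the last report

    [ [ 'filter-convert', v ], [ 'filter-convert-rate', 1.0 * v / interval ] ]
    # => [["filter-convert", 120], ["filter-convert-rate", 2.0]]   i.e. 2.0 events per second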
metadata CHANGED
@@ -1,19 +1,19 @@
  --- !ruby/object:Gem::Specification
  name: anschel
  version: !ruby/object:Gem::Version
-   version: 0.6.5
+   version: 0.7.0
  platform: ruby
  authors:
  - Sean Clemmer
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-10-08 00:00:00.000000000 Z
+ date: 2015-10-14 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '0.19'
    name: thor
@@ -21,13 +21,13 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '0.19'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: 1.4.0.pre.java
    name: jruby-kafka
@@ -35,27 +35,27 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: 1.4.0.pre.java
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.2'
+         version: '0.3'
    name: jrjackson
    prerelease: false
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.2'
+         version: '0.3'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '1.0'
    name: elasticsearch
@@ -63,27 +63,27 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '1.0'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.7'
+         version: '0.8'
    name: typhoeus
    prerelease: false
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.7'
+         version: '0.8'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '1.1'
    name: slog
@@ -91,23 +91,23 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '1.1'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '2.12'
+         version: '2.13'
    name: march_hare
    prerelease: false
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '2.12'
+         version: '2.13'
  description: Logstash-like for moving events from Kafka into Elasticsearch.
  email: sczizzo@gmail.com
  executables:
@@ -150,19 +150,18 @@ require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
-   - - '>='
+   - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - '>='
+   - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.6
+ rubygems_version: 2.4.8
  signing_key:
  specification_version: 4
  summary: Logstash-like for moving events from Kafka into Elasticsearch
  test_files: []
- has_rdoc: