logstash-codec-json 2.0.4 → 2.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 8418909b85cdc9db2f663891ed81edbea8140149
- data.tar.gz: f4dd2d0463ed1095deb0c08675f8e9cc9c0c59cc
+ metadata.gz: 805e4007ea5c42d2d1b750ecc1a0adbbd5122286
+ data.tar.gz: 5f94fb31f2a7d4d01ed917480eac709be5fe1871
  SHA512:
- metadata.gz: 8a772adc3d115dec99a9da5aa6a0e842615d22bb4a6691669534717b9d3327417249b48ad981fced60cb6c9a65e1ed2fe5b39450981e3dd66b8b9c806537f5e5
- data.tar.gz: 4c83176889fd32e648da63cfbc48a8814a5b45fca468f64b49fcb77e896949f88e19c0837ab6a2e39914cb616bea45cb2e4cfbebceb995e3638dc152dcf995d9
+ metadata.gz: fa6ab145db216675d8e4323976ec6f2407109efb7be7e5bbe05eb1162252f1d5ef3d940f0af415acecaedb70adb8f0de314c384452b3607b2b9f08620f289cbe
+ data.tar.gz: 2e94e21b7d41bc02cc3ce296d1cefd55c1cc17ba149534ce25cf29b361519e53ad6b58f948817f1c1d777e7063ab1c239bfe8e045f42f54d19d1ed7095a55186
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+ ## 2.1.0
+ - Backward compatible support for `Event#from_json` method https://github.com/logstash-plugins/logstash-codec-json/pull/21
+
  ## 2.0.4
  - Reduce the size of the gem by removing the vendor files
 
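The 2.1.0 entry above is the only functional change in this release: decoding now goes through LogStash::Event.from_json when the running logstash-core provides it, while the caller-facing contract stays the same. A rough sketch of that contract, modeled on the specs further down (the require assumes a Logstash 2.x load path):

    require "logstash/codecs/json"

    codec = LogStash::Codecs::JSON.new
    codec.decode('{"foo":"bar"}') do |event|
      # on parse failure the codec instead yields an event tagged "_jsonparsefailure"
      puts event["foo"]  # => "bar"
    end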
data/lib/logstash/codecs/json.rb CHANGED
@@ -17,7 +17,6 @@ require "logstash/json"
  class LogStash::Codecs::JSON < LogStash::Codecs::Base
  config_name "json"
 
-
  # The character encoding used in this codec. Examples include "UTF-8" and
  # "CP1252".
  #
@@ -29,43 +28,57 @@ class LogStash::Codecs::JSON < LogStash::Codecs::Base
  # For nxlog users, you may to set this to "CP1252".
  config :charset, :validate => ::Encoding.name_list, :default => "UTF-8"
 
- public
  def register
  @converter = LogStash::Util::Charset.new(@charset)
  @converter.logger = @logger
  end
 
- public
- def decode(data)
- data = @converter.convert(data)
- begin
- decoded = LogStash::Json.load(data)
- if decoded.is_a?(Array)
- decoded.each {|item| yield(LogStash::Event.new(item)) }
- elsif decoded.is_a?(Hash)
- yield LogStash::Event.new(decoded)
- else
- @logger.info? && @logger.info("JSON codec received a scalar instead of an Arary or Object!", :data => data)
- yield LogStash::Event.new("message" => data, "tags" => ["_jsonparsefailure"])
- end
-
- rescue LogStash::Json::ParserError => e
- @logger.info("JSON parse failure. Falling back to plain-text", :error => e, :data => data)
- yield LogStash::Event.new("message" => data, "tags" => ["_jsonparsefailure"])
- rescue StandardError => e
- # This should NEVER happen. But hubris has been the cause of many pipeline breaking things
- # If something bad should happen we just don't want to crash logstash here.
- @logger.warn("An unexpected error occurred parsing input to JSON",
- :input => data,
- :message => e.message,
- :class => e.class.name,
- :backtrace => e.backtrace)
- end
- end # def decode
+ def decode(data, &block)
+ parse(@converter.convert(data), &block)
+ end
 
- public
  def encode(event)
  @on_event.call(event, event.to_json)
- end # def encode
+ end
+
+ private
+
+ def from_json_parse(json, &block)
+ LogStash::Event.from_json(json).each { |event| yield event }
+ rescue LogStash::Json::ParserError => e
+ @logger.error("JSON parse error, original data now in message field", :error => e, :data => json)
+ yield LogStash::Event.new("message" => json, "tags" => ["_jsonparsefailure"])
+ end
+
+ def legacy_parse(json, &block)
+ decoded = LogStash::Json.load(json)
+
+ case decoded
+ when Array
+ decoded.each {|item| yield(LogStash::Event.new(item)) }
+ when Hash
+ yield LogStash::Event.new(decoded)
+ else
+ @logger.error("JSON codec is expecting array or object/map", :data => json)
+ yield LogStash::Event.new("message" => json, "tags" => ["_jsonparsefailure"])
+ end
+ rescue LogStash::Json::ParserError => e
+ @logger.error("JSON parse failure. Falling back to plain-text", :error => e, :data => json)
+ yield LogStash::Event.new("message" => json, "tags" => ["_jsonparsefailure"])
+ rescue StandardError => e
+ # This should NEVER happen. But hubris has been the cause of many pipeline breaking things
+ # If something bad should happen we just don't want to crash logstash here.
+ @logger.warn(
+ "An unexpected error occurred parsing JSON data",
+ :data => json,
+ :message => e.message,
+ :class => e.class.name,
+ :backtrace => e.backtrace
+ )
+ end
+
+ # keep compatibility with all v2.x distributions. only in 2.3 will the Event#from_json method be introduced
+ # and we need to keep compatibility for all v2 releases.
+ alias_method :parse, LogStash::Event.respond_to?(:from_json) ? :from_json_parse : :legacy_parse
 
- end # class LogStash::Codecs::JSON
+ end
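The new private from_json_parse/legacy_parse pair and the alias_method line above are the heart of the change: the parser implementation is chosen once, at load time, based on whether the running logstash-core exposes Event.from_json. A stripped-down, hedged illustration of that idiom follows; the class name is illustrative and the method bodies are simplified, with the error handling omitted:

    # Simplified sketch of load-time feature detection; not the plugin's actual class.
    class FeatureDetectingCodec
      def modern_parse(json, &block)
        # core-provided JSON -> Event conversion (available from Logstash 2.3 per the comment above)
        LogStash::Event.from_json(json).each(&block)
      end

      def legacy_parse(json, &block)
        # build the Event from a plain Hash on older cores
        yield LogStash::Event.new(LogStash::Json.load(json))
      end

      # resolved once when the file is loaded, so #decode pays no per-call branch
      alias_method :parse, LogStash::Event.respond_to?(:from_json) ? :modern_parse : :legacy_parse
    end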
data/logstash-codec-json.gemspec CHANGED
@@ -1,17 +1,17 @@
  Gem::Specification.new do |s|
 
  s.name = 'logstash-codec-json'
- s.version = '2.0.4'
+ s.version = '2.1.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "This codec may be used to decode (via inputs) and encode (via outputs) full JSON messages"
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
  s.authors = ["Elastic"]
  s.email = 'info@elastic.co'
  s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
- s.require_paths = ["lib"]
+ s.require_paths = ["lib"]
 
  # Files
- s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+ s.files = Dir['lib/**/*','spec/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
 
  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
data/spec/codecs/json_spec.rb CHANGED
@@ -6,161 +6,186 @@ require "insist"
 
  describe LogStash::Codecs::JSON do
  subject do
- next LogStash::Codecs::JSON.new
+ LogStash::Codecs::JSON.new
  end
 
- context "#decode" do
- it "should return an event from json data" do
- data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
- subject.decode(LogStash::Json.dump(data)) do |event|
- insist { event.is_a? LogStash::Event }
- insist { event["foo"] } == data["foo"]
- insist { event["baz"] } == data["baz"]
- insist { event["bah"] } == data["bah"]
+ shared_examples :codec do
+
+ context "#decode" do
+ it "should return an event from json data" do
+ data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
+ subject.decode(LogStash::Json.dump(data)) do |event|
+ insist { event.is_a? LogStash::Event }
+ insist { event["foo"] } == data["foo"]
+ insist { event["baz"] } == data["baz"]
+ insist { event["bah"] } == data["bah"]
+ end
  end
- end
 
- it "should be fast", :performance => true do
- json = '{"message":"Hello world!","@timestamp":"2013-12-21T07:01:25.616Z","@version":"1","host":"Macintosh.local","sequence":1572456}'
- iterations = 500000
- count = 0
+ it "should be fast", :performance => true do
+ json = '{"message":"Hello world!","@timestamp":"2013-12-21T07:01:25.616Z","@version":"1","host":"Macintosh.local","sequence":1572456}'
+ iterations = 500000
+ count = 0
 
- # Warmup
- 10000.times { subject.decode(json) { } }
+ # Warmup
+ 10000.times { subject.decode(json) { } }
 
- start = Time.now
- iterations.times do
- subject.decode(json) do |event|
- count += 1
+ start = Time.now
+ iterations.times do
+ subject.decode(json) do |event|
+ count += 1
+ end
  end
+ duration = Time.now - start
+ insist { count } == iterations
+ puts "codecs/json rate: #{"%02.0f/sec" % (iterations / duration)}, elapsed: #{duration}s"
  end
- duration = Time.now - start
- insist { count } == iterations
- puts "codecs/json rate: #{"%02.0f/sec" % (iterations / duration)}, elapsed: #{duration}s"
- end
 
- context "processing plain text" do
- it "falls back to plain text" do
- decoded = false
- subject.decode("something that isn't json") do |event|
- decoded = true
- insist { event.is_a?(LogStash::Event) }
- insist { event["message"] } == "something that isn't json"
- insist { event["tags"] }.include?("_jsonparsefailure")
+ context "processing plain text" do
+ it "falls back to plain text" do
+ decoded = false
+ subject.decode("something that isn't json") do |event|
+ decoded = true
+ insist { event.is_a?(LogStash::Event) }
+ insist { event["message"] } == "something that isn't json"
+ insist { event["tags"] }.include?("_jsonparsefailure")
+ end
+ insist { decoded } == true
  end
- insist { decoded } == true
  end
- end
 
- describe "scalar values" do
- shared_examples "given a value" do |value_arg|
- context "where value is #{value_arg}" do
- let(:value) { value_arg }
- let(:event) { LogStash::Event.new(value) }
- let(:value_json) { LogStash::Json.dump(value)}
- let(:event) do
- e = nil
- subject.decode(value_json) do |decoded|
- e = decoded
+ describe "scalar values" do
+ shared_examples "given a value" do |value_arg|
+ context "where value is #{value_arg}" do
+ let(:value) { value_arg }
+ let(:event) { LogStash::Event.new(value) }
+ let(:value_json) { LogStash::Json.dump(value)}
+ let(:event) do
+ e = nil
+ subject.decode(value_json) do |decoded|
+ e = decoded
+ end
+ e
  end
- e
- end
 
- it "should store the value in 'message'" do
- expect(event["message"]).to eql(value_json)
- end
+ it "should store the value in 'message'" do
+ expect(event["message"]).to eql(value_json)
+ end
 
- it "should have the json parse failure tag" do
- expect(event["tags"]).to include("_jsonparsefailure")
+ it "should have the json parse failure tag" do
+ expect(event["tags"]).to include("_jsonparsefailure")
+ end
  end
  end
- end
 
- include_examples "given a value", 123
- include_examples "given a value", "hello"
- include_examples "given a value", "-1"
- include_examples "given a value", " "
- end
+ include_examples "given a value", 123
+ include_examples "given a value", "hello"
+ include_examples "given a value", "-1"
+ include_examples "given a value", " "
+ end
 
- context "processing JSON with an array root" do
- let(:data) {
- [
- {"foo" => "bar"},
- {"foo" => "baz"}
- ]
- }
- let(:data_json) {
- LogStash::Json.dump(data)
- }
-
- it "should yield multiple events" do
- count = 0
- subject.decode(data_json) do |event|
- count += 1
+ context "processing JSON with an array root" do
+ let(:data) {
+ [
+ {"foo" => "bar"},
+ {"foo" => "baz"}
+ ]
+ }
+ let(:data_json) {
+ LogStash::Json.dump(data)
+ }
+
+ it "should yield multiple events" do
+ count = 0
+ subject.decode(data_json) do |event|
+ count += 1
+ end
+ expect(count).to eql(data.count)
  end
- expect(count).to eql(data.count)
- end
 
- it "should yield the correct objects" do
- index = 0
- subject.decode(data_json) do |event|
- expect(event.to_hash).to include(data[index])
- index += 1
+ it "should yield the correct objects" do
+ index = 0
+ subject.decode(data_json) do |event|
+ expect(event.to_hash).to include(data[index])
+ index += 1
+ end
  end
  end
- end
 
- context "processing weird binary blobs" do
- it "falls back to plain text and doesn't crash (LOGSTASH-1595)" do
- decoded = false
- blob = (128..255).to_a.pack("C*").force_encoding("ASCII-8BIT")
- subject.decode(blob) do |event|
- decoded = true
- insist { event.is_a?(LogStash::Event) }
- insist { event["message"].encoding.to_s } == "UTF-8"
+ context "processing weird binary blobs" do
+ it "falls back to plain text and doesn't crash (LOGSTASH-1595)" do
+ decoded = false
+ blob = (128..255).to_a.pack("C*").force_encoding("ASCII-8BIT")
+ subject.decode(blob) do |event|
+ decoded = true
+ insist { event.is_a?(LogStash::Event) }
+ insist { event["message"].encoding.to_s } == "UTF-8"
+ end
+ insist { decoded } == true
  end
- insist { decoded } == true
  end
- end
 
- context "when json could not be parsed" do
+ context "when json could not be parsed" do
 
- let(:message) { "random_message" }
+ let(:message) { "random_message" }
 
- it "add the failure tag" do
- subject.decode(message) do |event|
- expect(event).to include "tags"
+ it "add the failure tag" do
+ subject.decode(message) do |event|
+ expect(event).to include "tags"
+ end
  end
- end
 
- it "uses an array to store the tags" do
- subject.decode(message) do |event|
- expect(event['tags']).to be_a Array
+ it "uses an array to store the tags" do
+ subject.decode(message) do |event|
+ expect(event['tags']).to be_a Array
+ end
+ end
+
+ it "add a json parser failure tag" do
+ subject.decode(message) do |event|
+ expect(event['tags']).to include "_jsonparsefailure"
  end
  end
+ end
 
- it "add a json parser failure tag" do
- subject.decode(message) do |event|
- expect(event['tags']).to include "_jsonparsefailure"
+ context "#encode" do
+ it "should return json data" do
+ data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
+ event = LogStash::Event.new(data)
+ got_event = false
+ subject.on_event do |e, d|
+ insist { d.chomp } == event.to_json
+ insist { LogStash::Json.load(d)["foo"] } == data["foo"]
+ insist { LogStash::Json.load(d)["baz"] } == data["baz"]
+ insist { LogStash::Json.load(d)["bah"] } == data["bah"]
+ got_event = true
  end
+ subject.encode(event)
+ insist { got_event }
  end
  end
  end
 
- context "#encode" do
- it "should return json data" do
- data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
- event = LogStash::Event.new(data)
- got_event = false
- subject.on_event do |e, d|
- insist { d.chomp } == event.to_json
- insist { LogStash::Json.load(d)["foo"] } == data["foo"]
- insist { LogStash::Json.load(d)["baz"] } == data["baz"]
- insist { LogStash::Json.load(d)["bah"] } == data["bah"]
- got_event = true
+ context "forcing legacy parsing" do
+ it_behaves_like :codec do
+ before(:each) do
+ # stub codec parse method to force use of the legacy parser.
+ # this is very implementation specific but I am not sure how
+ # this can be tested otherwise.
+ allow(subject).to receive(:parse) do |data, &block|
+ subject.send(:legacy_parse, data, &block)
+ end
  end
- subject.encode(event)
- insist { got_event }
  end
  end
+
+ context "default parser choice" do
+ # here we cannot force the use of the Event#from_json since if this test is run in the
+ # legacy context (no Java Event) it will fail but if in the new context, it will be picked up.
+ it_behaves_like :codec do
+ # do nothing
+ end
+ end
+
  end
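The reworked spec wraps the original examples in shared_examples :codec so the same assertions run twice: once with parse stubbed to legacy_parse, and once with whatever parser the codec picked at load time. The #encode examples above also show the output contract: the serialized payload is handed to the on_event callback rather than returned. A minimal, hedged usage sketch of that callback (the payloads collector is illustrative only):

    codec = LogStash::Codecs::JSON.new
    payloads = []
    codec.on_event do |event, payload|
      # payload is event.to_json, as produced by #encode
      payloads << payload
    end
    codec.encode(LogStash::Event.new("foo" => "bar"))
    # payloads now holds one JSON string, e.g. {"foo":"bar", ...}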
metadata CHANGED
@@ -1,17 +1,18 @@
  --- !ruby/object:Gem::Specification
  name: logstash-codec-json
  version: !ruby/object:Gem::Version
- version: 2.0.4
+ version: 2.1.0
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-11-20 00:00:00.000000000 Z
+ date: 2016-02-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
+ name: logstash-core
+ version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
@@ -19,10 +20,7 @@ dependencies:
  - - <
  - !ruby/object:Gem::Version
  version: 3.0.0
- name: logstash-core
- prerelease: false
- type: :runtime
- version_requirements: !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
@@ -30,20 +28,22 @@ dependencies:
  - - <
  - !ruby/object:Gem::Version
  version: 3.0.0
+ prerelease: false
+ type: :runtime
  - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
+ name: logstash-devutils
+ version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
- name: logstash-devutils
- prerelease: false
- type: :development
- version_requirements: !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
+ prerelease: false
+ type: :development
  description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
  email: info@elastic.co
  executables: []