logstash-codec-avro 3.4.1-java → 3.5.0-java
This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +14 -0
- data/docs/index.asciidoc +222 -0
- data/lib/logstash/codecs/avro.rb +248 -5
- data/logstash-codec-avro.gemspec +2 -1
- data/spec/integration/avro_integration_spec.rb +431 -0
- data/spec/integration/fixtures/jaas.config +5 -0
- data/spec/integration/fixtures/pwd +2 -0
- data/spec/integration/fixtures/trust-store_stub.jks +0 -0
- data/spec/integration/kafka_test_setup.sh +85 -0
- data/spec/integration/kafka_test_teardown.sh +16 -0
- data/spec/integration/setup_keystore_and_truststore.sh +17 -0
- data/spec/integration/start_auth_schema_registry.sh +8 -0
- data/spec/integration/start_schema_registry.sh +5 -0
- data/spec/integration/start_schema_registry_mutual.sh +5 -0
- data/spec/integration/stop_schema_registry.sh +6 -0
- data/spec/unit/avro_spec.rb +866 -0
- metadata +61 -22
- data/spec/codecs/avro_spec.rb +0 -203
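The unit specs retained in this release (moved to spec/unit/) exercise the codec's user-facing options: schema_uri, encoding (base64 by default, or binary), target, and tag_on_failure. For orientation, here is a minimal plain-Ruby round-trip sketch that mirrors what those specs do. It is illustrative only: it assumes the plugin is installed locally and that schema_uri points at a readable local schema file (the specs instead stub open_and_read to inject the schema string directly).

require 'tempfile'
require 'logstash/codecs/avro'
require 'logstash/event'

# Hypothetical local schema file; the record mirrors the one used in the unit specs.
schema_file = Tempfile.new(['test_schema', '.avsc'])
schema_file.write('{"type": "record", "name": "Test",
                    "fields": [{"name": "foo", "type": ["null", "string"]},
                               {"name": "bar", "type": "int"}]}')
schema_file.close

# 'encoding' => 'binary' skips the default base64 wrapping of the Avro payload.
codec = LogStash::Codecs::Avro.new('schema_uri' => schema_file.path,
                                   'encoding'   => 'binary')

# Codecs register themselves on construction, so encode/decode can be called
# directly, just as the unit specs in this diff do.
codec.on_event do |_event, data|
  # data is the raw Avro-encoded payload; feed it back through decode.
  codec.decode(data) do |decoded|
    puts decoded.get('foo')   # => "hello"
    puts decoded.get('bar')   # => 10
  end
end
codec.encode(LogStash::Event.new('foo' => 'hello', 'bar' => 10))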
metadata
CHANGED
@@ -1,16 +1,16 @@
 --- !ruby/object:Gem::Specification
 name: logstash-codec-avro
 version: !ruby/object:Gem::Version
-  version: 3.4.1
+  version: 3.5.0
 platform: java
 authors:
 - Elastic
-autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2025-11-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
+  name: logstash-core-plugin-api
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -19,9 +19,8 @@ dependencies:
     - - "<="
       - !ruby/object:Gem::Version
         version: '2.99'
-  name: logstash-core-plugin-api
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -31,84 +30,104 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '2.99'
 - !ruby/object:Gem::Dependency
+  name: avro
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 1.10.2
-  name: avro
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 1.10.2
 - !ruby/object:Gem::Dependency
+  name: manticore
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 0.8.0
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 1.0.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 0.8.0
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 1.0.0
+- !ruby/object:Gem::Dependency
+  name: logstash-mixin-ecs_compatibility_support
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.3'
-  name: logstash-mixin-ecs_compatibility_support
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.3'
 - !ruby/object:Gem::Dependency
+  name: logstash-mixin-event_support
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.0'
-  name: logstash-mixin-event_support
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.0'
 - !ruby/object:Gem::Dependency
+  name: logstash-mixin-validator_support
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.0'
-  name: logstash-mixin-validator_support
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.0'
 - !ruby/object:Gem::Dependency
+  name: logstash-devutils
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  name: logstash-devutils
-  prerelease: false
   type: :development
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
+  name: insist
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  name: insist
-  prerelease: false
   type: :development
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -132,14 +151,24 @@ files:
 - docs/index.asciidoc
 - lib/logstash/codecs/avro.rb
 - logstash-codec-avro.gemspec
-- spec/
+- spec/integration/avro_integration_spec.rb
+- spec/integration/fixtures/jaas.config
+- spec/integration/fixtures/pwd
+- spec/integration/fixtures/trust-store_stub.jks
+- spec/integration/kafka_test_setup.sh
+- spec/integration/kafka_test_teardown.sh
+- spec/integration/setup_keystore_and_truststore.sh
+- spec/integration/start_auth_schema_registry.sh
+- spec/integration/start_schema_registry.sh
+- spec/integration/start_schema_registry_mutual.sh
+- spec/integration/stop_schema_registry.sh
+- spec/unit/avro_spec.rb
 homepage: https://www.elastic.co/logstash
 licenses:
 - Apache-2.0
 metadata:
   logstash_plugin: 'true'
   logstash_group: codec
-post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -154,9 +183,19 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.6.3
 specification_version: 4
 summary: Reads serialized Avro records as Logstash events
 test_files:
-- spec/
+- spec/integration/avro_integration_spec.rb
+- spec/integration/fixtures/jaas.config
+- spec/integration/fixtures/pwd
+- spec/integration/fixtures/trust-store_stub.jks
+- spec/integration/kafka_test_setup.sh
+- spec/integration/kafka_test_teardown.sh
+- spec/integration/setup_keystore_and_truststore.sh
+- spec/integration/start_auth_schema_registry.sh
+- spec/integration/start_schema_registry.sh
+- spec/integration/start_schema_registry_mutual.sh
+- spec/integration/stop_schema_registry.sh
+- spec/unit/avro_spec.rb
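Paraphrased as gemspec declarations, the dependency set after this change is roughly the following. This is a readability aid only, not the actual logstash-codec-avro.gemspec source; the lower bound on logstash-core-plugin-api falls outside the hunks shown above, so only its visible constraint appears here.

Gem::Specification.new do |s|
  s.name     = 'logstash-codec-avro'
  s.version  = '3.5.0'
  s.platform = 'java'
  s.authors  = ['Elastic']
  s.summary  = 'Reads serialized Avro records as Logstash events'
  s.homepage = 'https://www.elastic.co/logstash'
  s.licenses = ['Apache-2.0']

  # Runtime dependencies (manticore is new in 3.5.0)
  s.add_runtime_dependency 'logstash-core-plugin-api', '<= 2.99'   # lower bound not shown in the hunks above
  s.add_runtime_dependency 'avro', '~> 1.10.2'
  s.add_runtime_dependency 'manticore', '>= 0.8.0', '< 1.0.0'
  s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~> 1.3'
  s.add_runtime_dependency 'logstash-mixin-event_support', '~> 1.0'
  s.add_runtime_dependency 'logstash-mixin-validator_support', '~> 1.0'

  # Development dependencies (unchanged)
  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'insist'
end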
data/spec/codecs/avro_spec.rb
DELETED
@@ -1,203 +0,0 @@
-# encoding: utf-8
-require 'logstash/devutils/rspec/spec_helper'
-require 'insist'
-require 'avro'
-require 'base64'
-require 'logstash/codecs/avro'
-require 'logstash/event'
-require 'logstash/plugin_mixins/ecs_compatibility_support/spec_helper'
-
-describe LogStash::Codecs::Avro, :ecs_compatibility_support, :aggregate_failures do
-
-  ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
-    before(:each) do
-      allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
-    end
-
-    context "non binary data" do
-      let (:avro_config) {{ 'schema_uri' => '
-                        {"type": "record", "name": "Test",
-                        "fields": [{"name": "foo", "type": ["null", "string"]},
-                                   {"name": "bar", "type": "int"}]}' }}
-      let (:test_event_hash) { { "foo" => "hello", "bar" => 10 } }
-      let (:test_event) {LogStash::Event.new(test_event_hash)}
-
-      subject do
-        allow_any_instance_of(LogStash::Codecs::Avro).to \
-          receive(:open_and_read).and_return(avro_config['schema_uri'])
-        next LogStash::Codecs::Avro.new(avro_config)
-      end
-
-      context "#decode" do
-        it "should return an LogStash::Event from raw and base64 encoded avro data" do
-          schema = Avro::Schema.parse(avro_config['schema_uri'])
-          dw = Avro::IO::DatumWriter.new(schema)
-          buffer = StringIO.new
-          encoder = Avro::IO::BinaryEncoder.new(buffer)
-          dw.write(test_event.to_hash, encoder)
-
-          subject.decode(Base64.strict_encode64(buffer.string)) do |event|
-            insist {event.is_a? LogStash::Event}
-            insist {event.get("foo")} == test_event.get("foo")
-            insist {event.get("bar")} == test_event.get("bar")
-            expect(event.get('[event][original]')).to eq(Base64.strict_encode64(buffer.string)) if ecs_compatibility != :disabled
-          end
-          subject.decode(buffer.string) do |event|
-            insist {event.is_a? LogStash::Event}
-            insist {event.get("foo")} == test_event.get("foo")
-            insist {event.get("bar")} == test_event.get("bar")
-            expect(event.get('[event][original]')).to eq(buffer.string) if ecs_compatibility != :disabled
-          end
-        end
-
-        it "should throw exception if decoding fails" do
-          expect {subject.decode("not avro") {|_| }}.to raise_error NoMethodError
-        end
-      end
-
-      context "with binary encoding" do
-        let (:avro_config) { super().merge('encoding' => 'binary') }
-
-        it "should return an LogStash::Event from raw and base64 encoded avro data" do
-          schema = Avro::Schema.parse(avro_config['schema_uri'])
-          dw = Avro::IO::DatumWriter.new(schema)
-          buffer = StringIO.new
-          encoder = Avro::IO::BinaryEncoder.new(buffer)
-          dw.write(test_event.to_hash, encoder)
-
-          subject.decode(buffer.string) do |event|
-            expect(event).to be_a_kind_of(LogStash::Event)
-            expect(event.get("foo")).to eq(test_event.get("foo"))
-            expect(event.get("bar")).to eq(test_event.get("bar"))
-            expect(event.get('[event][original]')).to eq(buffer.string) if ecs_compatibility != :disabled
-          end
-        end
-
-        it "should raise an error if base64 encoded data is provided" do
-          schema = Avro::Schema.parse(avro_config['schema_uri'])
-          dw = Avro::IO::DatumWriter.new(schema)
-          buffer = StringIO.new
-          encoder = Avro::IO::BinaryEncoder.new(buffer)
-          dw.write(test_event.to_hash, encoder)
-
-          expect {subject.decode(Base64.strict_encode64(buffer.string))}.to raise_error
-        end
-      end
-
-      context "#decode with tag_on_failure" do
-        let (:avro_config) {{ 'schema_uri' => '
-                        {"type": "record", "name": "Test",
-                        "fields": [{"name": "foo", "type": ["null", "string"]},
-                                   {"name": "bar", "type": "int"}]}',
-                              'tag_on_failure' => true}}
-
-        it "should tag event on failure" do
-          subject.decode("not avro") do |event|
-            insist {event.is_a? LogStash::Event}
-            insist {event.get("tags")} == ["_avroparsefailure"]
-          end
-        end
-      end
-
-      context "#decode with target" do
-        let(:avro_target) { "avro_target" }
-        let (:avro_config) {{ 'schema_uri' => '
-                        {"type": "record", "name": "Test",
-                        "fields": [{"name": "foo", "type": ["null", "string"]},
-                                   {"name": "bar", "type": "int"}]}',
-                              'target' => avro_target}}
-
-        it "should return an LogStash::Event with content in target" do
-          schema = Avro::Schema.parse(avro_config['schema_uri'])
-          dw = Avro::IO::DatumWriter.new(schema)
-          buffer = StringIO.new
-          encoder = Avro::IO::BinaryEncoder.new(buffer)
-          dw.write(test_event.to_hash, encoder)
-
-          subject.decode(buffer.string) do |event|
-            insist {event.get("[#{avro_target}][foo]")} == test_event.get("foo")
-            insist {event.get("[#{avro_target}][bar]")} == test_event.get("bar")
-          end
-        end
-      end
-
-      context "#encode" do
-        it "should return avro data from a LogStash::Event" do
-          got_event = false
-          subject.on_event do |event, data|
-            schema = Avro::Schema.parse(avro_config['schema_uri'])
-            datum = StringIO.new(Base64.strict_decode64(data))
-            decoder = Avro::IO::BinaryDecoder.new(datum)
-            datum_reader = Avro::IO::DatumReader.new(schema)
-            record = datum_reader.read(decoder)
-
-            insist {record["foo"]} == test_event.get("foo")
-            insist {record["bar"]} == test_event.get("bar")
-            insist {event.is_a? LogStash::Event}
-            got_event = true
-          end
-          subject.encode(test_event)
-          insist {got_event}
-        end
-
-        context "with binary encoding" do
-          let (:avro_config) { super().merge('encoding' => 'binary') }
-
-          it "should return avro data from a LogStash::Event not base64 encoded" do
-            got_event = false
-            subject.on_event do |event, data|
-              schema = Avro::Schema.parse(avro_config['schema_uri'])
-              datum = StringIO.new(data)
-              decoder = Avro::IO::BinaryDecoder.new(datum)
-              datum_reader = Avro::IO::DatumReader.new(schema)
-              record = datum_reader.read(decoder)
-
-              expect(event).to be_a_kind_of(LogStash::Event)
-              expect(event.get("foo")).to eq(test_event.get("foo"))
-              expect(event.get("bar")).to eq(test_event.get("bar"))
-              got_event = true
-            end
-            subject.encode(test_event)
-            expect(got_event).to be true
-          end
-        end
-
-        context "binary data" do
-
-          let (:avro_config) {{ 'schema_uri' => '{"namespace": "com.systems.test.data",
-                          "type": "record",
-                          "name": "TestRecord",
-                          "fields": [
-                            {"name": "name", "type": ["string", "null"]},
-                            {"name": "longitude", "type": ["double", "null"]},
-                            {"name": "latitude", "type": ["double", "null"]}
-                          ]
-                        }' }}
-          let (:test_event) {LogStash::Event.new({ "name" => "foo", "longitude" => 21.01234.to_f, "latitude" => 111.0123.to_f })}
-
-          subject do
-            allow_any_instance_of(LogStash::Codecs::Avro).to \
-              receive(:open_and_read).and_return(avro_config['schema_uri'])
-            next LogStash::Codecs::Avro.new(avro_config)
-          end
-
-          it "should correctly encode binary data" do
-            schema = Avro::Schema.parse(avro_config['schema_uri'])
-            dw = Avro::IO::DatumWriter.new(schema)
-            buffer = StringIO.new
-            encoder = Avro::IO::BinaryEncoder.new(buffer)
-            dw.write(test_event.to_hash, encoder)
-
-            subject.decode(Base64.strict_encode64(buffer.string)) do |event|
-              insist {event.is_a? LogStash::Event}
-              insist {event.get("name")} == test_event.get("name")
-              insist {event.get("longitude")} == test_event.get("longitude")
-              insist {event.get("latitude")} == test_event.get("latitude")
-            end
-          end
-        end
-      end
-
-    end
-  end
-end