logstash-integration-kafka 11.6.3-java → 11.7.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/docs/input-kafka.asciidoc +10 -0
- data/docs/output-kafka.asciidoc +10 -0
- data/lib/logstash/inputs/kafka.rb +4 -0
- data/lib/logstash/outputs/kafka.rb +9 -3
- data/logstash-integration-kafka.gemspec +1 -1
- data/spec/unit/inputs/kafka_spec.rb +12 -4
- data/spec/unit/outputs/kafka_spec.rb +1 -1
- metadata +16 -19
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 73e5319cf0e521282ced1615e3cb864ae58fd4195b39a67e42ff995cb22d5004
|
4
|
+
data.tar.gz: 380f7e07b99231ae047fc8545e7be56132f92b7d8179ae97a1f7f95c49a01033
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 88d9dcbab467d2dc1af0e79e3b76af0cfdb3e2398644f1d4337414965c67813ee6d6829478c3bce8d082bbde269e34c134867a0831d0c4235cf9855c388597cf
|
7
|
+
data.tar.gz: 840eb2502efc99a1517790f315bb4a181697345eb422d36e64f7cf4d577999415acc43c32174ce905e31d0cd5cc00bd8f1b16962e26c78617db8fef3397280dd
|
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,9 @@
|
|
1
|
+
## 11.7.0
|
2
|
+
- Add `reconnect_backoff_max_ms` option for configuring kafka client [#204](https://github.com/logstash-plugins/logstash-integration-kafka/pull/204)
|
3
|
+
|
4
|
+
## 11.6.4
|
5
|
+
- Display exception chain comes from kafka client [#200](https://github.com/logstash-plugins/logstash-integration-kafka/pull/200)
|
6
|
+
|
1
7
|
## 11.6.3
|
2
8
|
- Update kafka client to 3.9.1 and transitive dependencies [#193](https://github.com/logstash-plugins/logstash-integration-kafka/pull/193)
|
3
9
|
|
data/docs/input-kafka.asciidoc
CHANGED
@@ -145,6 +145,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
|
|
145
145
|
| <<plugins-{type}s-{plugin}-poll_timeout_ms>> |<<number,number>>|No
|
146
146
|
| <<plugins-{type}s-{plugin}-receive_buffer_bytes>> |<<number,number>>|No
|
147
147
|
| <<plugins-{type}s-{plugin}-reconnect_backoff_ms>> |<<number,number>>|No
|
148
|
+
| <<plugins-{type}s-{plugin}-reconnect_backoff_max_ms>> |<<number,number>>|No
|
148
149
|
| <<plugins-{type}s-{plugin}-request_timeout_ms>> |<<number,number>>|No
|
149
150
|
| <<plugins-{type}s-{plugin}-retry_backoff_ms>> |<<number,number>>|No
|
150
151
|
| <<plugins-{type}s-{plugin}-sasl_client_callback_handler_class>> |<<string,string>>|No
|
@@ -561,6 +562,15 @@ The amount of time to wait before attempting to reconnect to a given host.
|
|
561
562
|
This avoids repeatedly connecting to a host in a tight loop.
|
562
563
|
This backoff applies to all requests sent by the consumer to the broker.
|
563
564
|
|
565
|
+
[id="plugins-{type}s-{plugin}-reconnect_backoff_max_ms"]
|
566
|
+
===== `reconnect_backoff_max_ms`
|
567
|
+
|
568
|
+
* Value type is <<number,number>>
|
569
|
+
* Default value is `1000` milliseconds.
|
570
|
+
|
571
|
+
The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repeatedly failed to connect.
|
572
|
+
If provided, the backoff per host will increase exponentially for each consecutive connection failure, up to this maximum.
|
573
|
+
|
564
574
|
[id="plugins-{type}s-{plugin}-request_timeout_ms"]
|
565
575
|
===== `request_timeout_ms`
|
566
576
|
|
data/docs/output-kafka.asciidoc
CHANGED
@@ -115,6 +115,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
|
|
115
115
|
| <<plugins-{type}s-{plugin}-partitioner>> |<<string,string>>|No
|
116
116
|
| <<plugins-{type}s-{plugin}-receive_buffer_bytes>> |<<number,number>>|No
|
117
117
|
| <<plugins-{type}s-{plugin}-reconnect_backoff_ms>> |<<number,number>>|No
|
118
|
+
| <<plugins-{type}s-{plugin}-reconnect_backoff_max_ms>> |<<number,number>>|No
|
118
119
|
| <<plugins-{type}s-{plugin}-request_timeout_ms>> |<<number,number>>|No
|
119
120
|
| <<plugins-{type}s-{plugin}-retries>> |<<number,number>>|No
|
120
121
|
| <<plugins-{type}s-{plugin}-retry_backoff_ms>> |<<number,number>>|No
|
@@ -373,6 +374,15 @@ The size of the TCP receive buffer to use when reading data
|
|
373
374
|
|
374
375
|
The amount of time to wait before attempting to reconnect to a given host when a connection fails.
|
375
376
|
|
377
|
+
[id="plugins-{type}s-{plugin}-reconnect_backoff_max_ms"]
|
378
|
+
===== `reconnect_backoff_max_ms`
|
379
|
+
|
380
|
+
* Value type is <<number,number>>
|
381
|
+
* Default value is `1000`.
|
382
|
+
|
383
|
+
The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repeatedly failed to connect.
|
384
|
+
If provided, the backoff per host will increase exponentially for each consecutive connection failure, up to this maximum.
|
385
|
+
|
376
386
|
[id="plugins-{type}s-{plugin}-request_timeout_ms"]
|
377
387
|
===== `request_timeout_ms`
|
378
388
|
|
@@ -167,6 +167,9 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
|
|
167
167
|
# This avoids repeatedly connecting to a host in a tight loop.
|
168
168
|
# This backoff applies to all connection attempts by the client to a broker.
|
169
169
|
config :reconnect_backoff_ms, :validate => :number, :default => 50 # Kafka default
|
170
|
+
# The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repeatedly failed to connect.
|
171
|
+
# If provided, the backoff per host will increase exponentially for each consecutive connection failure, up to this maximum.
|
172
|
+
config :reconnect_backoff_max_ms, :validate => :number, :default => 1000 # Kafka default
|
170
173
|
# The amount of time to wait before attempting to retry a failed fetch request
|
171
174
|
# to a given topic partition. This avoids repeated fetching-and-failing in a tight loop.
|
172
175
|
config :retry_backoff_ms, :validate => :number, :default => 100 # Kafka default
|
@@ -457,6 +460,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
|
|
457
460
|
props.put(kafka::PARTITION_ASSIGNMENT_STRATEGY_CONFIG, partition_assignment_strategy_class) unless partition_assignment_strategy.nil?
|
458
461
|
props.put(kafka::RECEIVE_BUFFER_CONFIG, receive_buffer_bytes.to_s) unless receive_buffer_bytes.nil?
|
459
462
|
props.put(kafka::RECONNECT_BACKOFF_MS_CONFIG, reconnect_backoff_ms.to_s) unless reconnect_backoff_ms.nil?
|
463
|
+
props.put(kafka::RECONNECT_BACKOFF_MAX_MS_CONFIG, reconnect_backoff_max_ms.to_s) unless reconnect_backoff_max_ms.nil?
|
460
464
|
props.put(kafka::REQUEST_TIMEOUT_MS_CONFIG, request_timeout_ms.to_s) unless request_timeout_ms.nil?
|
461
465
|
props.put(kafka::RETRY_BACKOFF_MS_CONFIG, retry_backoff_ms.to_s) unless retry_backoff_ms.nil?
|
462
466
|
props.put(kafka::SEND_BUFFER_CONFIG, send_buffer_bytes.to_s) unless send_buffer_bytes.nil?
|
@@ -116,6 +116,9 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
116
116
|
config :receive_buffer_bytes, :validate => :number, :default => 32_768 # (32KB) Kafka default
|
117
117
|
# The amount of time to wait before attempting to reconnect to a given host when a connection fails.
|
118
118
|
config :reconnect_backoff_ms, :validate => :number, :default => 50 # Kafka default
|
119
|
+
# The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repeatedly failed to connect.
|
120
|
+
# If provided, the backoff per host will increase exponentially for each consecutive connection failure, up to this maximum.
|
121
|
+
config :reconnect_backoff_max_ms, :validate => :number, :default => 1000 # Kafka default
|
119
122
|
# The default retry behavior is to retry until successful. To prevent data loss,
|
120
123
|
# the use of this setting is discouraged.
|
121
124
|
#
|
@@ -372,6 +375,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
372
375
|
end
|
373
376
|
props.put(kafka::RECEIVE_BUFFER_CONFIG, receive_buffer_bytes.to_s) unless receive_buffer_bytes.nil?
|
374
377
|
props.put(kafka::RECONNECT_BACKOFF_MS_CONFIG, reconnect_backoff_ms.to_s) unless reconnect_backoff_ms.nil?
|
378
|
+
props.put(kafka::RECONNECT_BACKOFF_MAX_MS_CONFIG, reconnect_backoff_max_ms.to_s) unless reconnect_backoff_max_ms.nil?
|
375
379
|
props.put(kafka::REQUEST_TIMEOUT_MS_CONFIG, request_timeout_ms.to_s) unless request_timeout_ms.nil?
|
376
380
|
props.put(kafka::RETRIES_CONFIG, retries.to_s) unless retries.nil?
|
377
381
|
props.put(kafka::RETRY_BACKOFF_MS_CONFIG, retry_backoff_ms.to_s)
|
@@ -391,9 +395,11 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
391
395
|
|
392
396
|
org.apache.kafka.clients.producer.KafkaProducer.new(props)
|
393
397
|
rescue => e
|
394
|
-
|
395
|
-
|
396
|
-
|
398
|
+
cause_error = e
|
399
|
+
while cause_error
|
400
|
+
logger.error("Kafka producer error chain", :kafka_error_message => "#{cause_error.class}: #{cause_error.message}")
|
401
|
+
cause_error = cause_error.respond_to?(:getCause) ? cause_error.getCause() : nil
|
402
|
+
end
|
397
403
|
raise e
|
398
404
|
end
|
399
405
|
end
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-integration-kafka'
|
3
|
-
s.version = '11.6.3'
|
3
|
+
s.version = '11.7.0'
|
4
4
|
s.licenses = ['Apache-2.0']
|
5
5
|
s.summary = "Integration with Kafka - input and output plugins"
|
6
6
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
|
@@ -378,11 +378,15 @@ describe LogStash::Inputs::Kafka do
|
|
378
378
|
end
|
379
379
|
|
380
380
|
context 'string integer config' do
|
381
|
-
let(:config) { super().merge('session_timeout_ms' => '25000',
|
381
|
+
let(:config) { super().merge('session_timeout_ms' => '25000',
|
382
|
+
'max_poll_interval_ms' => '345000',
|
383
|
+
'reconnect_backoff_max_ms' => '1500') }
|
382
384
|
|
383
385
|
it "sets integer values" do
|
384
386
|
expect(org.apache.kafka.clients.consumer.KafkaConsumer).
|
385
|
-
to receive(:new).with(hash_including('session.timeout.ms' => '25000',
|
387
|
+
to receive(:new).with(hash_including('session.timeout.ms' => '25000',
|
388
|
+
'max.poll.interval.ms' => '345000',
|
389
|
+
'reconnect.backoff.max.ms' => '1500')).
|
386
390
|
and_return kafka_client = double('kafka-consumer')
|
387
391
|
|
388
392
|
expect( subject.send(:create_consumer, 'sample_client-1', 'group_instance_id') ).to be kafka_client
|
@@ -390,11 +394,15 @@ describe LogStash::Inputs::Kafka do
|
|
390
394
|
end
|
391
395
|
|
392
396
|
context 'integer config' do
|
393
|
-
let(:config) { super().merge('session_timeout_ms' => 25200,
|
397
|
+
let(:config) { super().merge('session_timeout_ms' => 25200,
|
398
|
+
'max_poll_interval_ms' => 123_000,
|
399
|
+
'reconnect_backoff_max_ms' => 1500) }
|
394
400
|
|
395
401
|
it "sets integer values" do
|
396
402
|
expect(org.apache.kafka.clients.consumer.KafkaConsumer).
|
397
|
-
to receive(:new).with(hash_including('session.timeout.ms' => '25200',
|
403
|
+
to receive(:new).with(hash_including('session.timeout.ms' => '25200',
|
404
|
+
'max.poll.interval.ms' => '123000',
|
405
|
+
'reconnect.backoff.max.ms' => '1500')).
|
398
406
|
and_return kafka_client = double('kafka-consumer')
|
399
407
|
|
400
408
|
expect( subject.send(:create_consumer, 'sample_client-2', 'group_instance_id') ).to be kafka_client
|
@@ -114,7 +114,7 @@ describe "outputs/kafka" do
|
|
114
114
|
end
|
115
115
|
|
116
116
|
context "when KafkaProducer#send() raises a non-retriable exception" do
|
117
|
-
let(:failcount) {
|
117
|
+
let(:failcount) { 3 }
|
118
118
|
|
119
119
|
let(:exception_classes) { [
|
120
120
|
org.apache.kafka.common.errors.SerializationException,
|
metadata
CHANGED
@@ -1,16 +1,16 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-integration-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 11.6.3
|
4
|
+
version: 11.7.0
|
5
5
|
platform: java
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
|
-
autorequire:
|
9
8
|
bindir: bin
|
10
9
|
cert_chain: []
|
11
|
-
date: 2025-
|
10
|
+
date: 2025-10-03 00:00:00.000000000 Z
|
12
11
|
dependencies:
|
13
12
|
- !ruby/object:Gem::Dependency
|
13
|
+
name: logstash-core-plugin-api
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|
15
15
|
requirements:
|
16
16
|
- - ">="
|
@@ -19,7 +19,6 @@ dependencies:
|
|
19
19
|
- - "<="
|
20
20
|
- !ruby/object:Gem::Version
|
21
21
|
version: '2.99'
|
22
|
-
name: logstash-core-plugin-api
|
23
22
|
type: :runtime
|
24
23
|
prerelease: false
|
25
24
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -31,12 +30,12 @@ dependencies:
|
|
31
30
|
- !ruby/object:Gem::Version
|
32
31
|
version: '2.99'
|
33
32
|
- !ruby/object:Gem::Dependency
|
33
|
+
name: logstash-core
|
34
34
|
requirement: !ruby/object:Gem::Requirement
|
35
35
|
requirements:
|
36
36
|
- - ">="
|
37
37
|
- !ruby/object:Gem::Version
|
38
38
|
version: 8.3.0
|
39
|
-
name: logstash-core
|
40
39
|
type: :runtime
|
41
40
|
prerelease: false
|
42
41
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -45,12 +44,12 @@ dependencies:
|
|
45
44
|
- !ruby/object:Gem::Version
|
46
45
|
version: 8.3.0
|
47
46
|
- !ruby/object:Gem::Dependency
|
47
|
+
name: logstash-codec-json
|
48
48
|
requirement: !ruby/object:Gem::Requirement
|
49
49
|
requirements:
|
50
50
|
- - ">="
|
51
51
|
- !ruby/object:Gem::Version
|
52
52
|
version: '0'
|
53
|
-
name: logstash-codec-json
|
54
53
|
type: :runtime
|
55
54
|
prerelease: false
|
56
55
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -59,12 +58,12 @@ dependencies:
|
|
59
58
|
- !ruby/object:Gem::Version
|
60
59
|
version: '0'
|
61
60
|
- !ruby/object:Gem::Dependency
|
61
|
+
name: logstash-codec-plain
|
62
62
|
requirement: !ruby/object:Gem::Requirement
|
63
63
|
requirements:
|
64
64
|
- - ">="
|
65
65
|
- !ruby/object:Gem::Version
|
66
66
|
version: '0'
|
67
|
-
name: logstash-codec-plain
|
68
67
|
type: :runtime
|
69
68
|
prerelease: false
|
70
69
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -73,6 +72,7 @@ dependencies:
|
|
73
72
|
- !ruby/object:Gem::Version
|
74
73
|
version: '0'
|
75
74
|
- !ruby/object:Gem::Dependency
|
75
|
+
name: stud
|
76
76
|
requirement: !ruby/object:Gem::Requirement
|
77
77
|
requirements:
|
78
78
|
- - ">="
|
@@ -81,7 +81,6 @@ dependencies:
|
|
81
81
|
- - "<"
|
82
82
|
- !ruby/object:Gem::Version
|
83
83
|
version: 0.1.0
|
84
|
-
name: stud
|
85
84
|
type: :runtime
|
86
85
|
prerelease: false
|
87
86
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -93,6 +92,7 @@ dependencies:
|
|
93
92
|
- !ruby/object:Gem::Version
|
94
93
|
version: 0.1.0
|
95
94
|
- !ruby/object:Gem::Dependency
|
95
|
+
name: manticore
|
96
96
|
requirement: !ruby/object:Gem::Requirement
|
97
97
|
requirements:
|
98
98
|
- - ">="
|
@@ -101,7 +101,6 @@ dependencies:
|
|
101
101
|
- - "<"
|
102
102
|
- !ruby/object:Gem::Version
|
103
103
|
version: 1.0.0
|
104
|
-
name: manticore
|
105
104
|
type: :runtime
|
106
105
|
prerelease: false
|
107
106
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -113,12 +112,12 @@ dependencies:
|
|
113
112
|
- !ruby/object:Gem::Version
|
114
113
|
version: 1.0.0
|
115
114
|
- !ruby/object:Gem::Dependency
|
115
|
+
name: logstash-mixin-deprecation_logger_support
|
116
116
|
requirement: !ruby/object:Gem::Requirement
|
117
117
|
requirements:
|
118
118
|
- - "~>"
|
119
119
|
- !ruby/object:Gem::Version
|
120
120
|
version: '1.0'
|
121
|
-
name: logstash-mixin-deprecation_logger_support
|
122
121
|
type: :runtime
|
123
122
|
prerelease: false
|
124
123
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -127,12 +126,12 @@ dependencies:
|
|
127
126
|
- !ruby/object:Gem::Version
|
128
127
|
version: '1.0'
|
129
128
|
- !ruby/object:Gem::Dependency
|
129
|
+
name: logstash-devutils
|
130
130
|
requirement: !ruby/object:Gem::Requirement
|
131
131
|
requirements:
|
132
132
|
- - ">="
|
133
133
|
- !ruby/object:Gem::Version
|
134
134
|
version: '0'
|
135
|
-
name: logstash-devutils
|
136
135
|
type: :development
|
137
136
|
prerelease: false
|
138
137
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -141,12 +140,12 @@ dependencies:
|
|
141
140
|
- !ruby/object:Gem::Version
|
142
141
|
version: '0'
|
143
142
|
- !ruby/object:Gem::Dependency
|
143
|
+
name: logstash-codec-line
|
144
144
|
requirement: !ruby/object:Gem::Requirement
|
145
145
|
requirements:
|
146
146
|
- - ">="
|
147
147
|
- !ruby/object:Gem::Version
|
148
148
|
version: '0'
|
149
|
-
name: logstash-codec-line
|
150
149
|
type: :development
|
151
150
|
prerelease: false
|
152
151
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -155,12 +154,12 @@ dependencies:
|
|
155
154
|
- !ruby/object:Gem::Version
|
156
155
|
version: '0'
|
157
156
|
- !ruby/object:Gem::Dependency
|
157
|
+
name: rspec-wait
|
158
158
|
requirement: !ruby/object:Gem::Requirement
|
159
159
|
requirements:
|
160
160
|
- - ">="
|
161
161
|
- !ruby/object:Gem::Version
|
162
162
|
version: '0'
|
163
|
-
name: rspec-wait
|
164
163
|
type: :development
|
165
164
|
prerelease: false
|
166
165
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -169,12 +168,12 @@ dependencies:
|
|
169
168
|
- !ruby/object:Gem::Version
|
170
169
|
version: '0'
|
171
170
|
- !ruby/object:Gem::Dependency
|
171
|
+
name: digest-crc
|
172
172
|
requirement: !ruby/object:Gem::Requirement
|
173
173
|
requirements:
|
174
174
|
- - "~>"
|
175
175
|
- !ruby/object:Gem::Version
|
176
176
|
version: 0.5.1
|
177
|
-
name: digest-crc
|
178
177
|
type: :development
|
179
178
|
prerelease: false
|
180
179
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -183,12 +182,12 @@ dependencies:
|
|
183
182
|
- !ruby/object:Gem::Version
|
184
183
|
version: 0.5.1
|
185
184
|
- !ruby/object:Gem::Dependency
|
185
|
+
name: ruby-kafka
|
186
186
|
requirement: !ruby/object:Gem::Requirement
|
187
187
|
requirements:
|
188
188
|
- - ">="
|
189
189
|
- !ruby/object:Gem::Version
|
190
190
|
version: '0'
|
191
|
-
name: ruby-kafka
|
192
191
|
type: :development
|
193
192
|
prerelease: false
|
194
193
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -197,12 +196,12 @@ dependencies:
|
|
197
196
|
- !ruby/object:Gem::Version
|
198
197
|
version: '0'
|
199
198
|
- !ruby/object:Gem::Dependency
|
199
|
+
name: snappy
|
200
200
|
requirement: !ruby/object:Gem::Requirement
|
201
201
|
requirements:
|
202
202
|
- - ">="
|
203
203
|
- !ruby/object:Gem::Version
|
204
204
|
version: '0'
|
205
|
-
name: snappy
|
206
205
|
type: :development
|
207
206
|
prerelease: false
|
208
207
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -259,7 +258,6 @@ metadata:
|
|
259
258
|
logstash_plugin: 'true'
|
260
259
|
logstash_group: integration
|
261
260
|
integration_plugins: logstash-input-kafka,logstash-output-kafka
|
262
|
-
post_install_message:
|
263
261
|
rdoc_options: []
|
264
262
|
require_paths:
|
265
263
|
- lib
|
@@ -275,8 +273,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
275
273
|
- !ruby/object:Gem::Version
|
276
274
|
version: '0'
|
277
275
|
requirements: []
|
278
|
-
rubygems_version: 3.3
|
279
|
-
signing_key:
|
276
|
+
rubygems_version: 3.6.3
|
280
277
|
specification_version: 4
|
281
278
|
summary: Integration with Kafka - input and output plugins
|
282
279
|
test_files:
|