logstash-output-kafka 6.1.5 → 6.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: e444d91d51c0f7fa42cf6815f3a561bf3b60f69f
- data.tar.gz: 69b71acc6d10768a8f9207be80430cc4afa19388
+ metadata.gz: bcbbb5ea898478acca310b16509dab0e7c5f8e51
+ data.tar.gz: e381f9fae7d8e5d211fe734dc6561d4766931373
  SHA512:
- metadata.gz: c80f1b8e0206d35400087d718a415e914376cbe9d285314895949f3934b758507e9f82839300c7e49f683c80a9e2b541cb60d3cd336127b7e9de8a43eeb8b0b3
- data.tar.gz: 58c812dd93a356250b0fa2d755d1ee9dbe5b6d6cdae616b206019b2baec41f125f393b1f9490bd661423dff6aa462f2357bd2b35f6fe7337f7aceeec885cd048
+ metadata.gz: ab778f5d8557ecf4ee089eceaec2a550c5cd1d603daecabf2d38c2b6ae66d7b3309f9b5e2280a309a8829b2ae6a2997c84946da7b419a75e6a810fe93f0b3f1e
+ data.tar.gz: da215aaa55177acadfa439218c6c81a40616c43a185057885d4e211d66d65ebd21f5c02637483d9da0140f70e6b37cd20d27643ed67a93da013347934cc85821
CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ ## 6.2.0
+ - bump kafka dependency to 0.10.2.1
+
+ ## 6.1.6
+ - bring back log4j 1.x compatibility
+
  ## 6.1.5
  - Fix an NPE when SASL_SSL+PLAIN (no Kerberos) was specified.
 
docs/index.asciidoc ADDED
@@ -0,0 +1,449 @@
+ :plugin: kafka
+ :type: output
+
+ ///////////////////////////////////////////
+ START - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+ :version: %VERSION%
+ :release_date: %RELEASE_DATE%
+ :changelog_url: %CHANGELOG_URL%
+ :include_path: ../../../logstash/docs/include
+ ///////////////////////////////////////////
+ END - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+
+ [id="plugins-{type}-{plugin}"]
+
+ === Kafka
+
+ include::{include_path}/plugin_header.asciidoc[]
+
+ ==== Description
+
+ Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
+ the broker.
+
+ Here's a compatibility matrix that shows the Kafka client versions that are compatible with each combination
+ of Logstash and the Kafka output plugin:
+
+ [options="header"]
+ |==========================================================
+ |Kafka Client Version |Logstash Version |Plugin Version |Why?
+ |0.8 |2.0.0 - 2.x.x |<3.0.0 |Legacy, 0.8 is still popular
+ |0.9 |2.0.0 - 2.3.x | 3.x.x |Works with the old Ruby Event API (`event['product']['price'] = 10`)
+ |0.9 |2.4.x - 5.x.x | 4.x.x |Works with the new getter/setter APIs (`event.set('[product][price]', 10)`)
+ |0.10.0.x |2.4.x - 5.x.x | 5.x.x |Not compatible with the <= 0.9 broker
+ |0.10.1.x |2.4.x - 5.x.x | 6.x.x |
+ |==========================================================
+
+ NOTE: We recommend that you use matching Kafka client and broker versions. During upgrades, you should
+ upgrade brokers before clients because brokers target backwards compatibility. For example, the 0.9 broker
+ is compatible with both the 0.8 consumer and 0.9 consumer APIs, but not the other way around.
+
+ This output supports connecting to Kafka over:
+
+ * SSL (requires plugin version 3.0.0 or later)
+ * Kerberos SASL (requires plugin version 5.1.0 or later)
+
+ By default security is disabled but can be turned on as needed.
+
+ The only required configuration is the topic_id. The default codec is plain,
+ so events will be persisted on the broker in plain format. Logstash will encode your messages with
+ not only the message but also a timestamp and hostname. If you do not want anything but your message
+ passing through, you should make the output configuration something like:
+ [source,ruby]
+     output {
+       kafka {
+         codec => plain {
+           format => "%{message}"
+         }
+         topic_id => "mytopic"
+       }
+     }
+ For more information see http://kafka.apache.org/documentation.html#theproducer
+
+ Kafka producer configuration: http://kafka.apache.org/documentation.html#newproducerconfigs
+
+ [id="plugins-{type}s-{plugin}-options"]
+ ==== Kafka Output Configuration Options
+
+ This plugin supports the following configuration options plus the <<plugins-{type}s-common-options>> described later.
+
+ [cols="<,<,<",options="header",]
+ |=======================================================================
+ |Setting |Input type|Required
+ | <<plugins-{type}s-{plugin}-acks>> |<<string,string>>, one of `["0", "1", "all"]`|No
+ | <<plugins-{type}s-{plugin}-batch_size>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-bootstrap_servers>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-buffer_memory>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-client_id>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-compression_type>> |<<string,string>>, one of `["none", "gzip", "snappy", "lz4"]`|No
+ | <<plugins-{type}s-{plugin}-jaas_path>> |a valid filesystem path|No
+ | <<plugins-{type}s-{plugin}-kerberos_config>> |a valid filesystem path|No
+ | <<plugins-{type}s-{plugin}-key_serializer>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-linger_ms>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-max_request_size>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-message_key>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-metadata_fetch_timeout_ms>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-metadata_max_age_ms>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-receive_buffer_bytes>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-reconnect_backoff_ms>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-request_timeout_ms>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-retries>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-retry_backoff_ms>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-sasl_kerberos_service_name>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-sasl_mechanism>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-security_protocol>> |<<string,string>>, one of `["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"]`|No
+ | <<plugins-{type}s-{plugin}-send_buffer_bytes>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-ssl_key_password>> |<<password,password>>|No
+ | <<plugins-{type}s-{plugin}-ssl_keystore_location>> |a valid filesystem path|No
+ | <<plugins-{type}s-{plugin}-ssl_keystore_password>> |<<password,password>>|No
+ | <<plugins-{type}s-{plugin}-ssl_keystore_type>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-ssl_truststore_location>> |a valid filesystem path|No
+ | <<plugins-{type}s-{plugin}-ssl_truststore_password>> |<<password,password>>|No
+ | <<plugins-{type}s-{plugin}-ssl_truststore_type>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-topic_id>> |<<string,string>>|Yes
+ | <<plugins-{type}s-{plugin}-value_serializer>> |<<string,string>>|No
+ |=======================================================================
+
+ Also see <<plugins-{type}s-common-options>> for a list of options supported by all
+ output plugins.
+
+ &nbsp;
+
+ [id="plugins-{type}s-{plugin}-acks"]
+ ===== `acks`
+
+ * Value can be any of: `0`, `1`, `all`
+ * Default value is `"1"`
+
+ The number of acknowledgments the producer requires the leader to have received
+ before considering a request complete.
+
+ `acks=0`: the producer will not wait for any acknowledgment from the server at all.
+ `acks=1`: the leader will write the record to its local log, but will respond
+ without awaiting full acknowledgement from all followers.
+ `acks=all`: the leader will wait for the full set of in-sync replicas to acknowledge the record.
+
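As an illustrative sketch (not from the released file), a configuration that waits for every in-sync replica before a send is considered complete; the topic name is hypothetical:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"   # hypothetical topic name
        acks => "all"           # trade latency for durability
      }
    }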
+ [id="plugins-{type}s-{plugin}-batch_size"]
+ ===== `batch_size`
+
+ * Value type is <<number,number>>
+ * Default value is `16384`
+
+ The producer will attempt to batch records together into fewer requests whenever multiple
+ records are being sent to the same partition. This helps performance on both the client
+ and the server. This configuration controls the default batch size in bytes.
+
+ [id="plugins-{type}s-{plugin}-block_on_buffer_full"]
+ ===== `block_on_buffer_full` (DEPRECATED)
+
+ * DEPRECATED WARNING: This configuration item is deprecated and may not be available in future versions.
+ * Value type is <<boolean,boolean>>
+ * Default value is `true`
+
+ When our memory buffer is exhausted we must either stop accepting new
+ records (block) or throw errors. By default this setting is true and we block;
+ however, in some scenarios blocking is not desirable and it is better to immediately give an error.
+
+ [id="plugins-{type}s-{plugin}-bootstrap_servers"]
+ ===== `bootstrap_servers`
+
+ * Value type is <<string,string>>
+ * Default value is `"localhost:9092"`
+
+ This is for bootstrapping and the producer will only use it for getting metadata (topics,
+ partitions and replicas). The socket connections for sending the actual data will be
+ established based on the broker information returned in the metadata. The format is
+ `host1:port1,host2:port2`, and the list can be a subset of brokers or a VIP pointing to a
+ subset of brokers.
+
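As an illustrative sketch (not from the released file), pointing the producer at a subset of a cluster; the host names are hypothetical:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"                                                    # hypothetical topic name
        bootstrap_servers => "kafka1.example.com:9092,kafka2.example.com:9092"   # hypothetical brokers
      }
    }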
+ [id="plugins-{type}s-{plugin}-buffer_memory"]
+ ===== `buffer_memory`
+
+ * Value type is <<number,number>>
+ * Default value is `33554432`
+
+ The total bytes of memory the producer can use to buffer records waiting to be sent to the server.
+
+ [id="plugins-{type}s-{plugin}-client_id"]
+ ===== `client_id`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The id string to pass to the server when making requests.
+ The purpose of this is to be able to track the source of requests beyond just
+ ip/port by allowing a logical application name to be included with the request.
+
+ [id="plugins-{type}s-{plugin}-compression_type"]
+ ===== `compression_type`
+
+ * Value can be any of: `none`, `gzip`, `snappy`, `lz4`
+ * Default value is `"none"`
+
+ The compression type for all data generated by the producer.
+ The default is none (i.e. no compression). Valid values are none, gzip, snappy, or lz4.
+
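As an illustrative sketch (not from the released file), enabling snappy compression on outgoing batches:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"          # hypothetical topic name
        compression_type => "snappy"   # batches are compressed before being sent
      }
    }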
+ [id="plugins-{type}s-{plugin}-jaas_path"]
+ ===== `jaas_path`
+
+ * Value type is <<path,path>>
+ * There is no default value for this setting.
+
+ The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization
+ services for Kafka. This setting provides the path to the JAAS file. Sample JAAS file for Kafka client:
+ [source,java]
+ ----------------------------------
+ KafkaClient {
+   com.sun.security.auth.module.Krb5LoginModule required
+   useTicketCache=true
+   renewTicket=true
+   serviceName="kafka";
+ };
+ ----------------------------------
+
+ Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these
+ to the global JVM system properties. This means that if you have multiple Kafka inputs or outputs, all of them would share the same
+ `jaas_path` and `kerberos_config`. If this is not desirable, you would have to run separate instances of Logstash on
+ different JVM instances.
+
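Tying these settings together, an illustrative sketch (not from the released file) of a Kerberos-secured output; the paths are hypothetical:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"                   # hypothetical topic name
        security_protocol => "SASL_PLAINTEXT"
        sasl_kerberos_service_name => "kafka"
        jaas_path => "/etc/kafka_jaas.conf"     # hypothetical path
        kerberos_config => "/etc/krb5.conf"     # hypothetical path
      }
    }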
+ [id="plugins-{type}s-{plugin}-kerberos_config"]
+ ===== `kerberos_config`
+
+ * Value type is <<path,path>>
+ * There is no default value for this setting.
+
+ Optional path to a Kerberos config file. This is krb5.conf style as detailed in https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html
+
+ [id="plugins-{type}s-{plugin}-key_serializer"]
+ ===== `key_serializer`
+
+ * Value type is <<string,string>>
+ * Default value is `"org.apache.kafka.common.serialization.StringSerializer"`
+
+ Serializer class for the key of the message
+
+ [id="plugins-{type}s-{plugin}-linger_ms"]
+ ===== `linger_ms`
+
+ * Value type is <<number,number>>
+ * Default value is `0`
+
+ The producer groups together any records that arrive in between request
+ transmissions into a single batched request. Normally this occurs only under
+ load when records arrive faster than they can be sent out. However, in some circumstances
+ the client may want to reduce the number of requests even under moderate load.
+ This setting accomplishes this by adding a small amount of artificial delay: rather
+ than immediately sending out a record, the producer will wait for up to the given delay
+ to allow other records to be sent so that the sends can be batched together.
+
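As an illustrative sketch (not from the released file), trading up to 50 ms of extra latency for fuller batches; the values are examples, not recommendations:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"   # hypothetical topic name
        linger_ms => 50         # wait up to 50 ms for more records to arrive
        batch_size => 65536     # allow batches of up to 64 KiB
      }
    }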
+ [id="plugins-{type}s-{plugin}-max_request_size"]
+ ===== `max_request_size`
+
+ * Value type is <<number,number>>
+ * Default value is `1048576`
+
+ The maximum size of a request
+
+ [id="plugins-{type}s-{plugin}-message_key"]
+ ===== `message_key`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The key for the message
+
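The key is a plain string, so a field reference can be interpolated into it; an illustrative sketch (not from the released file), assuming events carry a `host` field:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"      # hypothetical topic name
        message_key => "%{host}"   # with the default partitioner, the same key maps to the same partition
      }
    }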
+ [id="plugins-{type}s-{plugin}-metadata_fetch_timeout_ms"]
+ ===== `metadata_fetch_timeout_ms`
+
+ * Value type is <<number,number>>
+ * Default value is `60000`
+
+ The timeout setting for the initial metadata request to fetch topic metadata.
+
+ [id="plugins-{type}s-{plugin}-metadata_max_age_ms"]
+ ===== `metadata_max_age_ms`
+
+ * Value type is <<number,number>>
+ * Default value is `300000`
+
+ The max time in milliseconds before a metadata refresh is forced.
+
+ [id="plugins-{type}s-{plugin}-receive_buffer_bytes"]
+ ===== `receive_buffer_bytes`
+
+ * Value type is <<number,number>>
+ * Default value is `32768`
+
+ The size of the TCP receive buffer to use when reading data
+
+ [id="plugins-{type}s-{plugin}-reconnect_backoff_ms"]
+ ===== `reconnect_backoff_ms`
+
+ * Value type is <<number,number>>
+ * Default value is `10`
+
+ The amount of time to wait before attempting to reconnect to a given host when a connection fails.
+
+ [id="plugins-{type}s-{plugin}-request_timeout_ms"]
+ ===== `request_timeout_ms`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The configuration controls the maximum amount of time the client will wait
+ for the response of a request. If the response is not received before the timeout
+ elapses, the client will resend the request if necessary or fail the request if
+ retries are exhausted.
+
+ [id="plugins-{type}s-{plugin}-retries"]
+ ===== `retries`
+
+ * Value type is <<number,number>>
+ * Default value is `0`
+
+ Setting a value greater than zero will cause the client to
+ resend any record whose send fails with a potentially transient error.
+
+ [id="plugins-{type}s-{plugin}-retry_backoff_ms"]
+ ===== `retry_backoff_ms`
+
+ * Value type is <<number,number>>
+ * Default value is `100`
+
+ The amount of time to wait before attempting to retry a failed produce request to a given topic partition.
+
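Combining the two retry settings, an illustrative sketch (not from the released file) with example values:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"     # hypothetical topic name
        retries => 3              # resend on potentially transient errors
        retry_backoff_ms => 500   # wait 500 ms between attempts
      }
    }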
+ [id="plugins-{type}s-{plugin}-sasl_kerberos_service_name"]
+ ===== `sasl_kerberos_service_name`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The Kerberos principal name that the Kafka broker runs as.
+ This can be defined either in Kafka's JAAS config or in Kafka's config.
+
+ [id="plugins-{type}s-{plugin}-sasl_mechanism"]
+ ===== `sasl_mechanism`
+
+ * Value type is <<string,string>>
+ * Default value is `"GSSAPI"`
+
+ http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections.
+ This may be any mechanism for which a security provider is available.
+ GSSAPI is the default mechanism.
+
+ [id="plugins-{type}s-{plugin}-security_protocol"]
+ ===== `security_protocol`
+
+ * Value can be any of: `PLAINTEXT`, `SSL`, `SASL_PLAINTEXT`, `SASL_SSL`
+ * Default value is `"PLAINTEXT"`
+
+ Security protocol to use, which can be one of PLAINTEXT, SSL, SASL_PLAINTEXT, or SASL_SSL
+
+ [id="plugins-{type}s-{plugin}-send_buffer_bytes"]
+ ===== `send_buffer_bytes`
+
+ * Value type is <<number,number>>
+ * Default value is `131072`
+
+ The size of the TCP send buffer to use when sending data.
+
+ [id="plugins-{type}s-{plugin}-ssl"]
+ ===== `ssl` (DEPRECATED)
+
+ * DEPRECATED WARNING: This configuration item is deprecated and may not be available in future versions.
+ * Value type is <<boolean,boolean>>
+ * Default value is `false`
+
+ Enable SSL/TLS secured communication to the Kafka broker.
+
+ [id="plugins-{type}s-{plugin}-ssl_key_password"]
+ ===== `ssl_key_password`
+
+ * Value type is <<password,password>>
+ * There is no default value for this setting.
+
+ The password of the private key in the key store file.
+
+ [id="plugins-{type}s-{plugin}-ssl_keystore_location"]
+ ===== `ssl_keystore_location`
+
+ * Value type is <<path,path>>
+ * There is no default value for this setting.
+
+ If client authentication is required, this setting stores the keystore path.
+
+ [id="plugins-{type}s-{plugin}-ssl_keystore_password"]
+ ===== `ssl_keystore_password`
+
+ * Value type is <<password,password>>
+ * There is no default value for this setting.
+
+ If client authentication is required, this setting stores the keystore password.
+
+ [id="plugins-{type}s-{plugin}-ssl_keystore_type"]
+ ===== `ssl_keystore_type`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The keystore type.
+
+ [id="plugins-{type}s-{plugin}-ssl_truststore_location"]
+ ===== `ssl_truststore_location`
+
+ * Value type is <<path,path>>
+ * There is no default value for this setting.
+
+ The JKS truststore path to validate the Kafka broker's certificate.
+
+ [id="plugins-{type}s-{plugin}-ssl_truststore_password"]
+ ===== `ssl_truststore_password`
+
+ * Value type is <<password,password>>
+ * There is no default value for this setting.
+
+ The truststore password
+
+ [id="plugins-{type}s-{plugin}-ssl_truststore_type"]
+ ===== `ssl_truststore_type`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The truststore type.
+
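Combining the SSL settings above, an illustrative sketch (not from the released file) of a TLS-encrypted connection that validates the broker's certificate; the path and password are hypothetical:
[source,ruby]
    output {
      kafka {
        topic_id => "mytopic"                                        # hypothetical topic name
        security_protocol => "SSL"
        ssl_truststore_location => "/etc/pki/kafka.truststore.jks"   # hypothetical path
        ssl_truststore_password => "changeit"                        # hypothetical password
      }
    }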
+ [id="plugins-{type}s-{plugin}-timeout_ms"]
+ ===== `timeout_ms` (DEPRECATED)
+
+ * DEPRECATED WARNING: This configuration item is deprecated and may not be available in future versions.
+ * Value type is <<number,number>>
+ * Default value is `30000`
+
+ The configuration controls the maximum amount of time the server will wait for acknowledgments
+ from followers to meet the acknowledgment requirements the producer has specified with the
+ acks configuration. If the requested number of acknowledgments is not met when the timeout
+ elapses, an error will be returned. This timeout is measured on the server side and does not
+ include the network latency of the request.
+
+ [id="plugins-{type}s-{plugin}-topic_id"]
+ ===== `topic_id`
+
+ * This is a required setting.
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The topic to produce messages to
+
+ [id="plugins-{type}s-{plugin}-value_serializer"]
+ ===== `value_serializer`
+
+ * Value type is <<string,string>>
+ * Default value is `"org.apache.kafka.common.serialization.StringSerializer"`
+
+ Serializer class for the value of the message
+
+ include::{include_path}/{type}.asciidoc[]
logstash-output-kafka.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|
 
  s.name = 'logstash-output-kafka'
- s.version = '6.1.5'
+ s.version = '6.2.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = 'Output events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker'
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -11,7 +11,7 @@ Gem::Specification.new do |s|
  s.require_paths = ['lib']
 
  # Files
- s.files = Dir['lib/**/*.rb','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+ s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
 
  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
@@ -19,8 +19,9 @@ Gem::Specification.new do |s|
  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { 'logstash_plugin' => 'true', 'group' => 'output'}
 
- s.requirements << "jar 'org.apache.kafka:kafka-clients', '0.10.1.1'"
+ s.requirements << "jar 'org.apache.kafka:kafka-clients', '0.10.2.1'"
  s.requirements << "jar 'org.slf4j:slf4j-log4j12', '1.7.21'"
+ s.requirements << "jar 'org.apache.logging.log4j:log4j-1.2-api', '2.6.2'"
 
  s.add_development_dependency 'jar-dependencies', '~> 0.3.2'
 
metadata CHANGED
@@ -1,39 +1,30 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-kafka
  version: !ruby/object:Gem::Version
- version: 6.1.5
+ version: 6.2.0
  platform: ruby
  authors:
  - Elasticsearch
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-04-08 00:00:00.000000000 Z
+ date: 2017-05-11 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: jar-dependencies
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: 0.3.2
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
  version: 0.3.2
+ name: jar-dependencies
  prerelease: false
  type: :development
- - !ruby/object:Gem::Dependency
- name: logstash-core-plugin-api
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '1.60'
- - - "<="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '2.99'
+ version: 0.3.2
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -42,78 +33,87 @@ dependencies:
  - - "<="
  - !ruby/object:Gem::Version
  version: '2.99'
+ name: logstash-core-plugin-api
  prerelease: false
  type: :runtime
- - !ruby/object:Gem::Dependency
- name: logstash-codec-plain
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0'
+ version: '1.60'
+ - - "<="
+ - !ruby/object:Gem::Version
+ version: '2.99'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ name: logstash-codec-plain
  prerelease: false
  type: :runtime
- - !ruby/object:Gem::Dependency
- name: logstash-codec-json
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ name: logstash-codec-json
  prerelease: false
  type: :runtime
- - !ruby/object:Gem::Dependency
- name: logstash-devutils
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ name: logstash-devutils
  prerelease: false
  type: :development
- - !ruby/object:Gem::Dependency
- name: poseidon
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ name: poseidon
  prerelease: false
  type: :development
- - !ruby/object:Gem::Dependency
- name: snappy
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ name: snappy
  prerelease: false
  type: :development
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  description: This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program
  email: info@elastic.co
  executables: []
@@ -127,13 +127,26 @@ files:
  - LICENSE
  - NOTICE.TXT
  - README.md
+ - docs/index.asciidoc
+ - lib/log4j/log4j/1.2.17/log4j-1.2.17.jar
  - lib/logstash-output-kafka_jars.rb
  - lib/logstash/outputs/kafka.rb
+ - lib/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar
+ - lib/org/apache/kafka/kafka-clients/0.10.2.1/kafka-clients-0.10.2.1.jar
+ - lib/org/apache/logging/log4j/log4j-1.2-api/2.6.2/log4j-1.2-api-2.6.2.jar
+ - lib/org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar
+ - lib/org/apache/logging/log4j/log4j-core/2.6.2/log4j-core-2.6.2.jar
+ - lib/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar
+ - lib/org/slf4j/slf4j-log4j12/1.7.21/slf4j-log4j12-1.7.21.jar
+ - lib/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar
  - logstash-output-kafka.gemspec
  - spec/integration/outputs/kafka_spec.rb
  - spec/unit/outputs/kafka_spec.rb
- - vendor/jar-dependencies/runtime-jars/kafka-clients-0.10.1.1.jar
+ - vendor/jar-dependencies/runtime-jars/kafka-clients-0.10.2.1.jar
+ - vendor/jar-dependencies/runtime-jars/log4j-1.2-api-2.6.2.jar
  - vendor/jar-dependencies/runtime-jars/log4j-1.2.17.jar
+ - vendor/jar-dependencies/runtime-jars/log4j-api-2.6.2.jar
+ - vendor/jar-dependencies/runtime-jars/log4j-core-2.6.2.jar
  - vendor/jar-dependencies/runtime-jars/lz4-1.3.0.jar
  - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.21.jar
  - vendor/jar-dependencies/runtime-jars/slf4j-log4j12-1.7.21.jar
@@ -159,8 +172,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements:
- - jar 'org.apache.kafka:kafka-clients', '0.10.1.1'
+ - jar 'org.apache.kafka:kafka-clients', '0.10.2.1'
  - jar 'org.slf4j:slf4j-log4j12', '1.7.21'
+ - jar 'org.apache.logging.log4j:log4j-1.2-api', '2.6.2'
  rubyforge_project:
  rubygems_version: 2.4.8
  signing_key: