logstash-input-jdbc 4.2.0 → 4.2.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 985a22e0b06671dd557cc71e2174b39c654b15ee
4
- data.tar.gz: 47347232f18c67f7f313814cb9663fcca0436d52
3
+ metadata.gz: 092e53a0d727fc85cbcb20dc9cbbb6af813bb535
4
+ data.tar.gz: 39aa1ec2ca42438a90a7cf3475c9eee6fe800622
5
5
  SHA512:
6
- metadata.gz: 6048835db426fc1aab07e4324b358cd39e5db309144b20a34a90372474a110543c681b98cc5563cc8bc1042277565ca1f26f1420e47bfdba8fe7f6dac9c68a89
7
- data.tar.gz: 17751bc5af1d066c19eaa15d2ef81b84b6a7556b88a2a2371b358162ba68f52e6ae0430ae5835bbd61f386974482ce27dc6244e128092fd46094b8e7889de06b
6
+ metadata.gz: db44c1a23b3590320c8ccdabd0118a7ed5df70cb4e124b0af089a7b0cad2b56a005c3e9609d4581685e1b01514257ef618c95103cb915616f3a01e72da864f79
7
+ data.tar.gz: 4a1d088041137bc24e0c789424705d889eea7a0fcb367548e36807c8d1a29a3fa595e1cba7af5ce08afaa31eafcdae6e88e79b8709110ff8e9ec126ca3e687d6
data/Gemfile CHANGED
@@ -1,4 +1,11 @@
1
1
  source 'https://rubygems.org'
2
2
 
3
- # Specify your gem's dependencies in logstash-mass_effect.gemspec
4
3
  gemspec
4
+
5
+ logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
6
+ use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
7
+
8
+ if Dir.exist?(logstash_path) && use_logstash_source
9
+ gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
10
+ gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
11
+ end
@@ -0,0 +1,486 @@
1
+ :plugin: jdbc
2
+ :type: input
3
+
4
+ ///////////////////////////////////////////
5
+ START - GENERATED VARIABLES, DO NOT EDIT!
6
+ ///////////////////////////////////////////
7
+ :version: %VERSION%
8
+ :release_date: %RELEASE_DATE%
9
+ :changelog_url: %CHANGELOG_URL%
10
+ :include_path: ../../../../logstash/docs/include
11
+ ///////////////////////////////////////////
12
+ END - GENERATED VARIABLES, DO NOT EDIT!
13
+ ///////////////////////////////////////////
14
+
15
+ [id="plugins-{type}-{plugin}"]
16
+
17
+ === Jdbc input plugin
18
+
19
+ include::{include_path}/plugin_header.asciidoc[]
20
+
21
+ ==== Description
22
+
23
+ This plugin was created as a way to ingest data in any database
24
+ with a JDBC interface into Logstash. You can periodically schedule ingestion
25
+ using a cron syntax (see `schedule` setting) or run the query one time to load
26
+ data into Logstash. Each row in the resultset becomes a single event.
27
+ Columns in the resultset are converted into fields in the event.
28
+
29
+ ==== Drivers
30
+
31
+ This plugin does not come packaged with JDBC driver libraries. The desired
32
+ jdbc driver library must be explicitly passed in to the plugin using the
33
+ `jdbc_driver_library` configuration option.
34
+
35
+ ==== Scheduling
36
+
37
+ Input from this plugin can be scheduled to run periodically according to a specific
38
+ schedule. This scheduling syntax is powered by https://github.com/jmettraux/rufus-scheduler[rufus-scheduler].
39
+ The syntax is cron-like with some extensions specific to Rufus (e.g. timezone support).
40
+
41
+ Examples:
42
+
43
+ |==========================================================
44
+ | `* 5 * 1-3 *` | will execute every minute of 5am every day of January through March.
45
+ | `0 * * * *` | will execute on the 0th minute of every hour every day.
46
+ | `0 6 * * * America/Chicago` | will execute at 6:00am (UTC/GMT -5) every day.
47
+ |==========================================================
48
+
49
+
50
+ Further documentation describing this syntax can be found https://github.com/jmettraux/rufus-scheduler#parsing-cronlines-and-time-strings[here].
51
+
52
+ ==== State
53
+
54
+ The plugin will persist the `sql_last_value` parameter in the form of a
55
+ metadata file stored in the configured `last_run_metadata_path`. Upon query execution,
56
+ this file will be updated with the current value of `sql_last_value`. Next time
57
+ the pipeline starts up, this value will be updated by reading from the file. If
58
+ `clean_run` is set to true, this value will be ignored and `sql_last_value` will be
59
+ set to Jan 1, 1970, or 0 if `use_column_value` is true, as if no query has ever been executed.
60
+
61
+ ==== Dealing With Large Result-sets
62
+
63
+ Many JDBC drivers use the `fetch_size` parameter to limit how many
64
+ results are pre-fetched at a time from the cursor into the client's cache
65
+ before retrieving more results from the result-set. This is configured in
66
+ this plugin using the `jdbc_fetch_size` configuration option. No fetch size
67
+ is set by default in this plugin, so the specific driver's default size will
68
+ be used.
69
+
70
+ ==== Usage:
71
+
72
+ Here is an example of setting up the plugin to fetch data from a MySQL database.
73
+ First, we place the appropriate JDBC driver library in our current
74
+ path (this can be placed anywhere on your filesystem). In this example, we connect to
75
+ the 'mydb' database using the user: 'mysql' and wish to input all rows in the 'songs'
76
+ table that match a specific artist. The following example demonstrates a possible
77
+ Logstash configuration for this. The `schedule` option in this example will
78
+ instruct the plugin to execute this input statement on the minute, every minute.
79
+
80
+ [source,ruby]
81
+ ------------------------------------------------------------------------------
82
+ input {
83
+ jdbc {
84
+ jdbc_driver_library => "mysql-connector-java-5.1.36-bin.jar"
85
+ jdbc_driver_class => "com.mysql.jdbc.Driver"
86
+ jdbc_connection_string => "jdbc:mysql://localhost:3306/mydb"
87
+ jdbc_user => "mysql"
88
+ parameters => { "favorite_artist" => "Beethoven" }
89
+ schedule => "* * * * *"
90
+ statement => "SELECT * from songs where artist = :favorite_artist"
91
+ }
92
+ }
93
+ ------------------------------------------------------------------------------
94
+
95
+ ==== Configuring SQL statement
96
+
97
+ An SQL statement is required for this input. This can be passed in via a
98
+ statement option in the form of a string, or read from a file (`statement_filepath`). File
99
+ option is typically used when the SQL statement is large or cumbersome to supply in the config.
100
+ The file option only supports one SQL statement. The plugin will only accept one of the options.
101
+ It cannot read a statement from a file as well as from the `statement` configuration parameter.
102
+
103
+ ==== Configuring multiple SQL statements
104
+
105
+ Configuring multiple SQL statements is useful when there is a need to query and ingest data
106
+ from different database tables or views. It is possible to define separate Logstash
107
+ configuration files for each statement or to define multiple statements in a single configuration
108
+ file. When using multiple statements in a single Logstash configuration file, each statement
109
+ has to be defined as a separate jdbc input (including jdbc driver, connection string and other
110
+ required parameters).
111
+
112
+ Please note that if any of the statements use the `sql_last_value` parameter (e.g. for
113
+ ingesting only data changed since last run), each input should define its own
114
+ `last_run_metadata_path` parameter. Failure to do so will result in undesired behaviour, as
115
+ all inputs will store their state to the same (default) metadata file, effectively
116
+ overwriting each other's `sql_last_value`.
117
+
118
+ ==== Predefined Parameters
119
+
120
+ Some parameters are built-in and can be used from within your queries.
121
+ Here is the list:
122
+
123
+ |==========================================================
124
+ |sql_last_value | The value used to calculate which rows to query. Before any query is run,
125
+ this is set to Thursday, 1 January 1970, or 0 if `use_column_value` is true and
126
+ `tracking_column` is set. It is updated accordingly after subsequent queries are run.
127
+ |==========================================================
128
+
129
+ Example:
130
+ [source,ruby]
131
+ ---------------------------------------------------------------------------------------------------
132
+ input {
133
+ jdbc {
134
+ statement => "SELECT id, mycolumn1, mycolumn2 FROM my_table WHERE id > :sql_last_value"
135
+ use_column_value => true
136
+ tracking_column => "id"
137
+ # ... other configuration bits
138
+ }
139
+ }
140
+ ---------------------------------------------------------------------------------------------------
141
+
142
+
143
+ [id="plugins-{type}s-{plugin}-options"]
144
+ ==== Jdbc Input Configuration Options
145
+
146
+ This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
147
+
148
+ [cols="<,<,<",options="header",]
149
+ |=======================================================================
150
+ |Setting |Input type|Required
151
+ | <<plugins-{type}s-{plugin}-clean_run>> |<<boolean,boolean>>|No
152
+ | <<plugins-{type}s-{plugin}-columns_charset>> |<<hash,hash>>|No
153
+ | <<plugins-{type}s-{plugin}-connection_retry_attempts>> |<<number,number>>|No
154
+ | <<plugins-{type}s-{plugin}-connection_retry_attempts_wait_time>> |<<number,number>>|No
155
+ | <<plugins-{type}s-{plugin}-jdbc_connection_string>> |<<string,string>>|Yes
156
+ | <<plugins-{type}s-{plugin}-jdbc_default_timezone>> |<<string,string>>|No
157
+ | <<plugins-{type}s-{plugin}-jdbc_driver_class>> |<<string,string>>|Yes
158
+ | <<plugins-{type}s-{plugin}-jdbc_driver_library>> |<<string,string>>|No
159
+ | <<plugins-{type}s-{plugin}-jdbc_fetch_size>> |<<number,number>>|No
160
+ | <<plugins-{type}s-{plugin}-jdbc_page_size>> |<<number,number>>|No
161
+ | <<plugins-{type}s-{plugin}-jdbc_paging_enabled>> |<<boolean,boolean>>|No
162
+ | <<plugins-{type}s-{plugin}-jdbc_password>> |<<password,password>>|No
163
+ | <<plugins-{type}s-{plugin}-jdbc_password_filepath>> |a valid filesystem path|No
164
+ | <<plugins-{type}s-{plugin}-jdbc_pool_timeout>> |<<number,number>>|No
165
+ | <<plugins-{type}s-{plugin}-jdbc_user>> |<<string,string>>|Yes
166
+ | <<plugins-{type}s-{plugin}-jdbc_validate_connection>> |<<boolean,boolean>>|No
167
+ | <<plugins-{type}s-{plugin}-jdbc_validation_timeout>> |<<number,number>>|No
168
+ | <<plugins-{type}s-{plugin}-last_run_metadata_path>> |<<string,string>>|No
169
+ | <<plugins-{type}s-{plugin}-lowercase_column_names>> |<<boolean,boolean>>|No
170
+ | <<plugins-{type}s-{plugin}-parameters>> |<<hash,hash>>|No
171
+ | <<plugins-{type}s-{plugin}-record_last_run>> |<<boolean,boolean>>|No
172
+ | <<plugins-{type}s-{plugin}-schedule>> |<<string,string>>|No
173
+ | <<plugins-{type}s-{plugin}-sequel_opts>> |<<hash,hash>>|No
174
+ | <<plugins-{type}s-{plugin}-sql_log_level>> |<<string,string>>, one of `["fatal", "error", "warn", "info", "debug"]`|No
175
+ | <<plugins-{type}s-{plugin}-statement>> |<<string,string>>|No
176
+ | <<plugins-{type}s-{plugin}-statement_filepath>> |a valid filesystem path|No
177
+ | <<plugins-{type}s-{plugin}-tracking_column>> |<<string,string>>|No
178
+ | <<plugins-{type}s-{plugin}-tracking_column_type>> |<<string,string>>, one of `["numeric", "timestamp"]`|No
179
+ | <<plugins-{type}s-{plugin}-use_column_value>> |<<boolean,boolean>>|No
180
+ |=======================================================================
181
+
182
+ Also see <<plugins-{type}s-{plugin}-common-options>> for a list of options supported by all
183
+ input plugins.
184
+
185
+ &nbsp;
186
+
187
+ [id="plugins-{type}s-{plugin}-clean_run"]
188
+ ===== `clean_run`
189
+
190
+ * Value type is <<boolean,boolean>>
191
+ * Default value is `false`
192
+
193
+ Whether the previous run state should be preserved
194
+
195
+ [id="plugins-{type}s-{plugin}-columns_charset"]
196
+ ===== `columns_charset`
197
+
198
+ * Value type is <<hash,hash>>
199
+ * Default value is `{}`
200
+
201
+ The character encoding for specific columns. This option will override the `:charset` option
202
+ for the specified columns.
203
+
204
+ Example:
205
+ [source,ruby]
206
+ -------------------------------------------------------
207
+ input {
208
+ jdbc {
209
+ ...
210
+ columns_charset => { "column0" => "ISO-8859-1" }
211
+ ...
212
+ }
213
+ }
214
+ -------------------------------------------------------
215
+ This will convert only column0, which has ISO-8859-1 as its original encoding.
216
+
217
+ [id="plugins-{type}s-{plugin}-connection_retry_attempts"]
218
+ ===== `connection_retry_attempts`
219
+
220
+ * Value type is <<number,number>>
221
+ * Default value is `1`
222
+
223
+ Maximum number of times to try connecting to database
224
+
225
+ [id="plugins-{type}s-{plugin}-connection_retry_attempts_wait_time"]
226
+ ===== `connection_retry_attempts_wait_time`
227
+
228
+ * Value type is <<number,number>>
229
+ * Default value is `0.5`
230
+
231
+ Number of seconds to sleep between connection attempts
232
+
233
+ [id="plugins-{type}s-{plugin}-jdbc_connection_string"]
234
+ ===== `jdbc_connection_string`
235
+
236
+ * This is a required setting.
237
+ * Value type is <<string,string>>
238
+ * There is no default value for this setting.
239
+
240
+ JDBC connection string
241
+
242
+ [id="plugins-{type}s-{plugin}-jdbc_default_timezone"]
243
+ ===== `jdbc_default_timezone`
244
+
245
+ * Value type is <<string,string>>
246
+ * There is no default value for this setting.
247
+
248
+ Timezone conversion.
249
+ SQL does not allow for timezone data in timestamp fields. This plugin will automatically
250
+ convert your SQL timestamp fields to Logstash timestamps, in relative UTC time in ISO8601 format.
251
+
252
+ Using this setting will manually assign a specified timezone offset, instead
253
+ of using the timezone setting of the local machine. You must use a canonical
254
+ timezone, *America/Denver*, for example.
255
+
256
+ [id="plugins-{type}s-{plugin}-jdbc_driver_class"]
257
+ ===== `jdbc_driver_class`
258
+
259
+ * This is a required setting.
260
+ * Value type is <<string,string>>
261
+ * There is no default value for this setting.
262
+
263
+ JDBC driver class to load, for example, "org.apache.derby.jdbc.ClientDriver"
264
+ NB per https://github.com/logstash-plugins/logstash-input-jdbc/issues/43 if you are using
265
+ the Oracle JDBC driver (ojdbc6.jar) the correct `jdbc_driver_class` is `"Java::oracle.jdbc.driver.OracleDriver"`
266
+
267
+ [id="plugins-{type}s-{plugin}-jdbc_driver_library"]
268
+ ===== `jdbc_driver_library`
269
+
270
+ * Value type is <<string,string>>
271
+ * There is no default value for this setting.
272
+
273
+ Tentative of abstracting JDBC logic to a mixin
274
+ for potential reuse in other plugins (input/output)
275
+ This method is called when someone includes this module
276
+ Add these methods to the 'base' given.
277
+ JDBC driver library path to third party driver library. In case of multiple libraries being
278
+ required you can pass them separated by a comma.
279
+
280
+ If not provided, Plugin will look for the driver class in the Logstash Java classpath.
281
+
282
+ [id="plugins-{type}s-{plugin}-jdbc_fetch_size"]
283
+ ===== `jdbc_fetch_size`
284
+
285
+ * Value type is <<number,number>>
286
+ * There is no default value for this setting.
287
+
288
+ JDBC fetch size. if not provided, respective driver's default will be used
289
+
290
+ [id="plugins-{type}s-{plugin}-jdbc_page_size"]
291
+ ===== `jdbc_page_size`
292
+
293
+ * Value type is <<number,number>>
294
+ * Default value is `100000`
295
+
296
+ JDBC page size
297
+
298
+ [id="plugins-{type}s-{plugin}-jdbc_paging_enabled"]
299
+ ===== `jdbc_paging_enabled`
300
+
301
+ * Value type is <<boolean,boolean>>
302
+ * Default value is `false`
303
+
304
+ JDBC enable paging
305
+
306
+ This will cause a sql statement to be broken up into multiple queries.
307
+ Each query will use limits and offsets to collectively retrieve the full
308
+ result-set. The limit size is set with `jdbc_page_size`.
309
+
310
+ Be aware that ordering is not guaranteed between queries.
311
+
312
+ [id="plugins-{type}s-{plugin}-jdbc_password"]
313
+ ===== `jdbc_password`
314
+
315
+ * Value type is <<password,password>>
316
+ * There is no default value for this setting.
317
+
318
+ JDBC password
319
+
320
+ [id="plugins-{type}s-{plugin}-jdbc_password_filepath"]
321
+ ===== `jdbc_password_filepath`
322
+
323
+ * Value type is <<path,path>>
324
+ * There is no default value for this setting.
325
+
326
+ JDBC password filename
327
+
328
+ [id="plugins-{type}s-{plugin}-jdbc_pool_timeout"]
329
+ ===== `jdbc_pool_timeout`
330
+
331
+ * Value type is <<number,number>>
332
+ * Default value is `5`
333
+
334
+ Connection pool configuration.
335
+ The amount of seconds to wait to acquire a connection before raising a PoolTimeoutError (default 5)
336
+
337
+ [id="plugins-{type}s-{plugin}-jdbc_user"]
338
+ ===== `jdbc_user`
339
+
340
+ * This is a required setting.
341
+ * Value type is <<string,string>>
342
+ * There is no default value for this setting.
343
+
344
+ JDBC user
345
+
346
+ [id="plugins-{type}s-{plugin}-jdbc_validate_connection"]
347
+ ===== `jdbc_validate_connection`
348
+
349
+ * Value type is <<boolean,boolean>>
350
+ * Default value is `false`
351
+
352
+ Connection pool configuration.
353
+ Validate connection before use.
354
+
355
+ [id="plugins-{type}s-{plugin}-jdbc_validation_timeout"]
356
+ ===== `jdbc_validation_timeout`
357
+
358
+ * Value type is <<number,number>>
359
+ * Default value is `3600`
360
+
361
+ Connection pool configuration.
362
+ How often to validate a connection (in seconds)
363
+
364
+ [id="plugins-{type}s-{plugin}-last_run_metadata_path"]
365
+ ===== `last_run_metadata_path`
366
+
367
+ * Value type is <<string,string>>
368
+ * Default value is `"/home/ph/.logstash_jdbc_last_run"`
369
+
370
+ Path to file with last run time
371
+
372
+ [id="plugins-{type}s-{plugin}-lowercase_column_names"]
373
+ ===== `lowercase_column_names`
374
+
375
+ * Value type is <<boolean,boolean>>
376
+ * Default value is `true`
377
+
378
+ Whether to force the lowercasing of identifier fields
379
+
380
+ [id="plugins-{type}s-{plugin}-parameters"]
381
+ ===== `parameters`
382
+
383
+ * Value type is <<hash,hash>>
384
+ * Default value is `{}`
385
+
386
+ Hash of query parameters, for example `{ "target_id" => "321" }`
387
+
388
+ [id="plugins-{type}s-{plugin}-record_last_run"]
389
+ ===== `record_last_run`
390
+
391
+ * Value type is <<boolean,boolean>>
392
+ * Default value is `true`
393
+
394
+ Whether to save state or not in last_run_metadata_path
395
+
396
+ [id="plugins-{type}s-{plugin}-schedule"]
397
+ ===== `schedule`
398
+
399
+ * Value type is <<string,string>>
400
+ * There is no default value for this setting.
401
+
402
+ Schedule of when to periodically run statement, in Cron format
403
+ for example: "* * * * *" (execute query every minute, on the minute)
404
+
405
+ There is no schedule by default. If no schedule is given, then the statement is run
406
+ exactly once.
407
+
408
+ [id="plugins-{type}s-{plugin}-sequel_opts"]
409
+ ===== `sequel_opts`
410
+
411
+ * Value type is <<hash,hash>>
412
+ * Default value is `{}`
413
+
414
+ General/Vendor-specific Sequel configuration options.
415
+
416
+ An example of an optional connection pool configuration
417
+ max_connections - The maximum number of connections the connection pool
418
+
419
+ examples of vendor-specific options can be found in this
420
+ documentation page: https://github.com/jeremyevans/sequel/blob/master/doc/opening_databases.rdoc
421
+
422
+ [id="plugins-{type}s-{plugin}-sql_log_level"]
423
+ ===== `sql_log_level`
424
+
425
+ * Value can be any of: `fatal`, `error`, `warn`, `info`, `debug`
426
+ * Default value is `"info"`
427
+
428
+ Log level at which to log SQL queries, the accepted values are the common ones fatal, error, warn,
429
+ info and debug. The default value is info.
430
+
431
+ [id="plugins-{type}s-{plugin}-statement"]
432
+ ===== `statement`
433
+
434
+ * Value type is <<string,string>>
435
+ * There is no default value for this setting.
436
+
437
+ If undefined, Logstash will complain, even if codec is unused.
438
+ Statement to execute
439
+
440
+ To use parameters, use named parameter syntax.
441
+ For example:
442
+
443
+ [source, ruby]
444
+ -----------------------------------------------
445
+ "SELECT * FROM MYTABLE WHERE id = :target_id"
446
+ -----------------------------------------------
447
+
448
+ here, ":target_id" is a named parameter. You can configure named parameters
449
+ with the `parameters` setting.
450
+
451
+ [id="plugins-{type}s-{plugin}-statement_filepath"]
452
+ ===== `statement_filepath`
453
+
454
+ * Value type is <<path,path>>
455
+ * There is no default value for this setting.
456
+
457
+ Path of file containing statement to execute
458
+
459
+ [id="plugins-{type}s-{plugin}-tracking_column"]
460
+ ===== `tracking_column`
461
+
462
+ * Value type is <<string,string>>
463
+ * There is no default value for this setting.
464
+
465
+ If tracking column value rather than timestamp, the column whose value is to be tracked
466
+
467
+ [id="plugins-{type}s-{plugin}-tracking_column_type"]
468
+ ===== `tracking_column_type`
469
+
470
+ * Value can be any of: `numeric`, `timestamp`
471
+ * Default value is `"numeric"`
472
+
473
+ Type of tracking column. Currently only "numeric" and "timestamp" are supported.
474
+
475
+ [id="plugins-{type}s-{plugin}-use_column_value"]
476
+ ===== `use_column_value`
477
+
478
+ * Value type is <<boolean,boolean>>
479
+ * Default value is `false`
480
+
481
+ Use an incremental column value rather than a timestamp
482
+
483
+
484
+
485
+ [id="plugins-{type}s-{plugin}-common-options"]
486
+ include::{include_path}/{type}.asciidoc[]
@@ -4,7 +4,7 @@ require "logstash/namespace"
4
4
  require "logstash/plugin_mixins/jdbc"
5
5
  require "yaml" # persistence
6
6
 
7
- # This plugin was created as a way to ingest data in any database
7
+ # This plugin was created as a way to ingest data from any database
8
8
  # with a JDBC interface into Logstash. You can periodically schedule ingestion
9
9
  # using a cron syntax (see `schedule` setting) or run the query one time to load
10
10
  # data into Logstash. Each row in the resultset becomes a single event.
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-input-jdbc'
3
- s.version = '4.2.0'
3
+ s.version = '4.2.1'
4
4
  s.licenses = ['Apache License (2.0)']
5
5
  s.summary = "This example input streams a string at a definable interval."
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -10,7 +10,7 @@ Gem::Specification.new do |s|
10
10
  s.require_paths = ["lib"]
11
11
 
12
12
  # Files
13
- s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
13
+ s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
14
14
  # Tests
15
15
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
16
16
 
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-jdbc
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.2.0
4
+ version: 4.2.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-04-28 00:00:00.000000000 Z
11
+ date: 2017-06-23 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -153,6 +153,7 @@ files:
153
153
  - LICENSE
154
154
  - NOTICE.TXT
155
155
  - README.md
156
+ - docs/index.asciidoc
156
157
  - lib/logstash/inputs/jdbc.rb
157
158
  - lib/logstash/plugin_mixins/jdbc.rb
158
159
  - logstash-input-jdbc.gemspec