logstash-filter-json 3.0.2 → 3.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 75b6415ca4f107d636b1305a1cfa50f5fe6116a5
- data.tar.gz: 4b1adb7d1a1b8c24fc464dfcdd055b4d0372115c
+ metadata.gz: eaaa5dc9665555401807b85ab686e70d61cc5f6b
+ data.tar.gz: e32c2c03935902774be46e98a1ad12b2e7a7dda6
  SHA512:
- metadata.gz: 9c4057279fe3c6911553fc383883bc41deaccaee6f3cad0f56298d451d2bcb870176aa9ea0cf24c81c98acf1b4b11b675803444d9a419e41fe80baa907472fae
- data.tar.gz: aa96a6fd9b490b9967f62ef022ae380af6edd232a850f164496d81c41067623a5508c5b6fe6c3b00f3c35985c07aedc8c88d3e86e58896cbff54fc4e271c1a49
+ metadata.gz: ec227571f7f30cfe2dde812e1af56146d1582d9ab5a8797cd5019e8b50fa2e63f79dd0926e7cd64834c54c33dc6fa6c82884ab9f94a30697dbe635ada1083bc6
+ data.tar.gz: f81f408df31f56b35421ebb9e0b9f76a4cc9d4f86484ec9ad0a9d33114aeb63bdbebe7306489ecb8ee7c2ec478f95ed2dc7a7c80aa7531ffb21a04a27abf2e14
data/Gemfile CHANGED
@@ -1,4 +1,11 @@
  source 'https://rubygems.org'

- # Specify your gem's dependencies in logstash-mass_effect.gemspec
  gemspec
+
+ logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+ use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+ if Dir.exist?(logstash_path) && use_logstash_source
+   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+ end
docs/index.asciidoc ADDED
@@ -0,0 +1,121 @@
+ :plugin: json
+ :type: filter
+
+ ///////////////////////////////////////////
+ START - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+ :version: %VERSION%
+ :release_date: %RELEASE_DATE%
+ :changelog_url: %CHANGELOG_URL%
+ :include_path: ../../../../logstash/docs/include
+ ///////////////////////////////////////////
+ END - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+
+ [id="plugins-{type}-{plugin}"]
+
+ === Json filter plugin
+
+ include::{include_path}/plugin_header.asciidoc[]
+
+ ==== Description
+
+ This is a JSON parsing filter. It takes an existing field which contains JSON and
+ expands it into an actual data structure within the Logstash event.
+
+ By default it will place the parsed JSON in the root (top level) of the Logstash event, but this
+ filter can be configured to place the JSON into any arbitrary event field, using the
+ `target` configuration.
+
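+ For example, to parse JSON arriving in the `message` field and place the result under a
+ separate field (the `parsed` name below is only illustrative), a configuration could look like this:
+ [source,ruby]
+     filter {
+       json {
+         source => "message"
+         target => "parsed"
+       }
+     }
+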
+ This plugin has a few fallback scenarios when something bad happens during the parsing of the event.
+ If the JSON parsing fails on the data, the event will be left untouched and tagged with
+ `_jsonparsefailure`; you can then use conditionals to clean up the data. You can configure this tag
+ with the `tag_on_failure` option.
+
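+ For example, assuming the JSON arrives in the `message` field, you could drop (or otherwise
+ reroute) events that failed to parse by checking for the failure tag in a conditional:
+ [source,ruby]
+     filter {
+       json {
+         source => "message"
+       }
+       if "_jsonparsefailure" in [tags] {
+         drop { }
+       }
+     }
+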
+ If the parsed data contains a `@timestamp` field, we will try to use it for the event's `@timestamp`. If that
+ parsing fails, the field will be renamed to `_@timestamp` and the event will be tagged with
+ `_timestampparsefailure`.
+
+ [id="plugins-{type}s-{plugin}-options"]
+ ==== Json Filter Configuration Options
+
+ This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
+
+ [cols="<,<,<",options="header",]
+ |=======================================================================
+ |Setting |Input type|Required
+ | <<plugins-{type}s-{plugin}-skip_on_invalid_json>> |<<boolean,boolean>>|No
+ | <<plugins-{type}s-{plugin}-source>> |<<string,string>>|Yes
+ | <<plugins-{type}s-{plugin}-tag_on_failure>> |<<array,array>>|No
+ | <<plugins-{type}s-{plugin}-target>> |<<string,string>>|No
+ |=======================================================================
+
+ Also see <<plugins-{type}s-{plugin}-common-options>> for a list of options supported by all
+ filter plugins.
+
+ &nbsp;
+
+ [id="plugins-{type}s-{plugin}-skip_on_invalid_json"]
+ ===== `skip_on_invalid_json`
+
+ * Value type is <<boolean,boolean>>
+ * Default value is `false`
+
+ Allows skipping the filter on invalid JSON (this lets you handle JSON and non-JSON data without warnings).
+
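+ For example, if the `message` field sometimes carries plain text instead of JSON, you could let
+ those events pass through without warnings:
+ [source,ruby]
+     filter {
+       json {
+         source => "message"
+         skip_on_invalid_json => true
+       }
+     }
+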
+ [id="plugins-{type}s-{plugin}-source"]
+ ===== `source`
+
+ * This is a required setting.
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The configuration for the JSON filter:
+ [source,ruby]
+     source => source_field
+
+ For example, if you have JSON data in the `message` field:
+ [source,ruby]
+     filter {
+       json {
+         source => "message"
+       }
+     }
+
+ The above would parse the JSON from the `message` field.
+
+ [id="plugins-{type}s-{plugin}-tag_on_failure"]
+ ===== `tag_on_failure`
+
+ * Value type is <<array,array>>
+ * Default value is `["_jsonparsefailure"]`
+
+ Append values to the `tags` field when there has been no
+ successful match.
+
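+ For example, to tag failed events with an additional custom tag (the `json_parse_error` name is
+ only illustrative):
+ [source,ruby]
+     filter {
+       json {
+         source => "message"
+         tag_on_failure => ["_jsonparsefailure", "json_parse_error"]
+       }
+     }
+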
+ [id="plugins-{type}s-{plugin}-target"]
+ ===== `target`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ Define the target field for placing the parsed data. If this setting is
+ omitted, the JSON data will be stored at the root (top level) of the event.
+
+ For example, if you want the data to be put in the `doc` field:
+ [source,ruby]
+     filter {
+       json {
+         target => "doc"
+       }
+     }
+
+ JSON in the value of the `source` field will be expanded into a
+ data structure in the `target` field.
+
+ NOTE: if the `target` field already exists, it will be overwritten!
+
+
+
+ [id="plugins-{type}s-{plugin}-common-options"]
+ include::{include_path}/{type}.asciidoc[]
logstash-filter-json.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

  s.name = 'logstash-filter-json'
- s.version = '3.0.2'
+ s.version = '3.0.3'
  s.licenses = ['Apache License (2.0)']
  s.summary = "This is a JSON parsing filter. It takes an existing field which contains JSON and expands it into an actual data structure within the Logstash event."
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -11,7 +11,7 @@ Gem::Specification.new do |s|
  s.require_paths = ["lib"]

  # Files
- s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+ s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-filter-json
  version: !ruby/object:Gem::Version
- version: 3.0.2
+ version: 3.0.3
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-07-14 00:00:00.000000000 Z
+ date: 2017-06-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -56,6 +56,7 @@ files:
  - LICENSE
  - NOTICE.TXT
  - README.md
+ - docs/index.asciidoc
  - lib/logstash/filters/json.rb
  - logstash-filter-json.gemspec
  - spec/filters/json_spec.rb
@@ -81,7 +82,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.3
+ rubygems_version: 2.4.8
  signing_key:
  specification_version: 4
  summary: This is a JSON parsing filter. It takes an existing field which contains JSON and expands it into an actual data structure within the Logstash event.