logstash-output-csv 3.0.9 → 3.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b7a05140cdafe0cf40199b965734aec4da79a70513ee9f3d9085af089ce126dc
- data.tar.gz: 047b1370413123253334129b37f504ba85f71a29e99e769dc333525e79471a0f
+ metadata.gz: ecdec45db80595cc81698a806071c64593c61f4750dd5ce3bca94dce10a55b3f
+ data.tar.gz: ac29398e62fe6942440fd847be257f8ff6c737f812ae003743688a189fba29fd
  SHA512:
- metadata.gz: a7c0372851b2b7ca7d6989f17846a6658636e439b6328d72182fb6da1ac9ea5875686c6d74aa8be928bc327a9f01408151944a98ade2d51e9fdf68dcc2ef1690
- data.tar.gz: 9e7a658e67f5207c8d58ed8234ef61bcdd9c65171ad918df536753b59a91573f26e8fad18949c4bbce785a085a635d516e8eda9f4dfe4268b2b730e7028af3c3
+ metadata.gz: 7f3902673a817ae7c836d5bc600bee0d93f5bf6735bd32eb72373f065c0ff5ebde69461d306c10cb16eadc96c37e6568038652471743ee13d093efd1e83cc408
+ data.tar.gz: cd130b35764cc66b0ac070d49ed0d24ff701524dafcead9b45179a97c80a8a35e6fcac3ae48dba05bed6a51c0a16ec4fd2a355e90b9261d3f55aa0f8bec9278f
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ ## 3.0.11
+ - Docs: Correct code snippet [#28](https://github.com/logstash-plugins/logstash-output-csv/pull/28)
+
+ ## 3.0.10
+ - Extend `spreadsheet_safe` prefix guard to '-', '+', and '@' [#27](https://github.com/logstash-plugins/logstash-output-csv/pull/27)
+
  ## 3.0.9
  - Fix: updates syntax to JRuby 9.4 [#25](https://github.com/logstash-plugins/logstash-output-csv/pull/25)

data/docs/index.asciidoc CHANGED
@@ -147,7 +147,7 @@ You can customise the line format using the `line` codec like:
  [source,ruby]
  -----
  output {
- file {
+ csv {
  path => ...
  codec => line { format => "custom format: %{message}"}
  }
@@ -180,4 +180,4 @@ may not make the values safe in your spreadsheet application
  include::{include_path}/{type}.asciidoc[]


- :default_codec!:
+ :default_codec!:
@@ -66,6 +66,6 @@ class LogStash::Outputs::CSV < LogStash::Outputs::File

  private
  def escape_csv(val)
- (spreadsheet_safe && val.is_a?(String) && val.start_with?("=")) ? "'#{val}" : val
+ (spreadsheet_safe && val.is_a?(String) && val.start_with?(/[=+\-@]/)) ? "'#{val}" : val
  end
  end # class LogStash::Outputs::CSV
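The widened guard above is the substance of the 3.0.10 change: with `spreadsheet_safe` enabled (the default), any string value that starts with `=`, `+`, `-`, or `@` is prefixed with a single quote before it is written, so spreadsheet applications treat it as text rather than a formula. A minimal standalone sketch of that behavior (plain Ruby mirroring `escape_csv`; the helper name and keyword argument here are illustrative, not the plugin API):

```ruby
# Illustrative re-implementation of the guard in escape_csv. Assumes Ruby >= 2.5,
# where String#start_with? accepts a Regexp. Not the plugin itself.
def escape_for_spreadsheet(val, spreadsheet_safe: true)
  (spreadsheet_safe && val.is_a?(String) && val.start_with?(/[=+\-@]/)) ? "'#{val}" : val
end

["1+1", "=1+1", "+1+1", "-1+1", "@1+1"].each do |value|
  puts "#{value.inspect} => #{escape_for_spreadsheet(value).inspect}"
end
# "1+1"  => "1+1"     (no leading formula character, left untouched)
# "=1+1" => "'=1+1"   (already escaped in 3.0.9)
# "+1+1" => "'+1+1"   (newly escaped in 3.0.10)
# "-1+1" => "'-1+1"   (newly escaped in 3.0.10)
# "@1+1" => "'@1+1"   (newly escaped in 3.0.10)
```

These outputs match the `'=1+1'`, `'+1+1'`, `'-1+1'`, and `'@1+1'` assertions in the rewritten specs further down.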
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

  s.name = 'logstash-output-csv'
- s.version = '3.0.9'
+ s.version = '3.0.11'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Writes events to disk in a delimited format"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -22,9 +22,6 @@ Gem::Specification.new do |s|
  # Gem dependencies
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"

- s.add_runtime_dependency 'logstash-input-generator'
  s.add_runtime_dependency 'logstash-output-file'
- s.add_runtime_dependency 'logstash-filter-json'
  s.add_development_dependency 'logstash-devutils'
- s.add_development_dependency 'insist'
  end
@@ -1,319 +1,171 @@
  require "csv"
  require "tempfile"
  require "logstash/devutils/rspec/spec_helper"
- require "insist"
  require "logstash/outputs/csv"

  describe LogStash::Outputs::CSV do

+ subject { described_class.new(options) }

- describe "Write a single field to a csv file" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- add_field => ["foo","bar"]
- count => 1
- }
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => "foo"
- }
- }
- CONFIG
+ let(:tmpfile) { Tempfile.new('logstash-spec-output-csv').path }
+ let(:output) { File.readlines(tmpfile) }
+ let(:csv_output) { CSV.read(tmpfile) }

- agent do
- lines = File.readlines(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0]} == "bar\n"
- end
+ before(:each) do
+ subject.register
+ subject.multi_receive(events)
  end

- describe "write multiple fields and lines to a csv file" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- add_field => ["foo", "bar", "baz", "quux"]
- count => 2
- }
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "baz"]
- }
- }
- CONFIG
-
- agent do
- lines = File.readlines(tmpfile.path)
- insist {lines.count} == 2
- insist {lines[0]} == "bar,quux\n"
- insist {lines[1]} == "bar,quux\n"
+ context "when configured with a single field" do
+ let(:events) { [ LogStash::Event.new("foo" => "bar") ] }
+ let(:options) { { "path" => tmpfile, "fields" => "foo" } }
+ it "writes a single field to a csv file" do
+ expect(output.count).to eq(1)
+ expect(output.first).to eq("bar\n")
  end
  end

- describe "missing event fields are empty in csv" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- add_field => ["foo","bar", "baz", "quux"]
- count => 1
- }
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "not_there", "baz"]
- }
- }
- CONFIG
-
- agent do
- lines = File.readlines(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0]} == "bar,,quux\n"
+ context "when receiving multiple events with multiple fields" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => "bar", "baz" => "quux"),
+ LogStash::Event.new("foo" => "bar", "baz" => "quux") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }
+ it "writes a line per event " do
+ expect(output.count).to eq(2)
+ end
+ it "writes configured fields for each line" do
+ expect(output[0]).to eq("bar,quux\n")
+ expect(output[1]).to eq("bar,quux\n")
  end
  end

- describe "commas are quoted properly" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- add_field => ["foo","one,two", "baz", "quux"]
- count => 1
- }
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "baz"]
- }
- }
- CONFIG
+ context "with missing event fields" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => "bar", "baz" => "quux") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["foo", "not_there", "baz"] } }

- agent do
- lines = File.readlines(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0]} == "\"one,two\",quux\n"
+ it "skips on the resulting line" do
+ expect(output.size).to eq(1)
+ expect(output[0]).to eq("bar,,quux\n")
  end
  end

- describe "new lines are quoted properly" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- add_field => ["foo","one\ntwo", "baz", "quux"]
- count => 1
- }
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "baz"]
- }
- }
- CONFIG
-
- agent do
- lines = CSV.read(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0][0]} == "one\ntwo"
+ context "when field values have commas" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => "one,two", "baz" => "quux") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }
+ it "correctly escapes them" do
+ expect(output.size).to eq(1)
+ expect(output[0]).to eq("\"one,two\",quux\n")
  end
  end

- describe "fields that are are objects are written as JSON" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- message => '{"foo":{"one":"two"},"baz": "quux"}'
- count => 1
- }
- }
- filter {
- json { source => "message"}
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "baz"]
- }
- }
- CONFIG
-
- agent do
- lines = CSV.read(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0][0]} == '{"one":"two"}'
+ context "when fields contain special characters" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => 'one\ntwo', "baz" => "quux") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }
+ it "correctly escapes them" do
+ expect(csv_output.size).to eq(1)
+ expect(csv_output[0]).to eq(['one\ntwo', 'quux'])
  end
  end

- describe "can address nested field using field reference syntax" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- message => '{"foo":{"one":"two"},"baz": "quux"}'
- count => 1
- }
- }
- filter {
- json { source => "message"}
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["[foo][one]", "baz"]
- }
- }
- CONFIG
+ context "fields that contain objects" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => {"one" => "two"}, "baz" => "quux") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }

- agent do
- lines = CSV.read(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0][0]} == "two"
- insist {lines[0][1]} == "quux"
+ it "are written as json" do
+ expect(csv_output.size).to eq(1)
+ expect(csv_output[0][0]).to eq('{"one":"two"}')
  end
  end
+ context "with address nested field" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => {"one" => "two"}, "baz" => "quux") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["[foo][one]", "baz"] } }

- describe "missing nested field is blank" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- message => '{"foo":{"one":"two"},"baz": "quux"}'
- count => 1
- }
- }
- filter {
- json { source => "message"}
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["[foo][missing]", "baz"]
- }
- }
- CONFIG
-
- agent do
- lines = File.readlines(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0]} == ",quux\n"
+ it "are referenced using field references" do
+ expect(csv_output.size).to eq(1)
+ expect(csv_output[0][0]).to eq('two')
+ expect(csv_output[0][1]).to eq('quux')
  end
  end

- describe "can choose field seperator" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- message => '{"foo":"one","bar": "two"}'
- count => 1
- }
- }
- filter {
- json { source => "message"}
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "bar"]
- csv_options => {"col_sep" => "|"}
- }
- }
- CONFIG
+ context "missing nested field" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => {"one" => "two"}, "baz" => "quux") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["[foo][missing]", "baz"] } }

- agent do
- lines = File.readlines(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0]} == "one|two\n"
+ it "are blank" do
+ expect(output.size).to eq(1)
+ expect(output[0]).to eq(",quux\n")
  end
  end
- describe "can choose line seperator" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- message => '{"foo":"one","bar": "two"}'
- count => 2
- }
- }
- filter {
- json { source => "message"}
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "bar"]
- csv_options => {"col_sep" => "|" "row_sep" => "\t"}
- }
- }
- CONFIG

- agent do
- lines = File.readlines(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0]} == "one|two\tone|two\t"
+ describe "field separator" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => "one", "baz" => "two") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"], "csv_options" => {"col_sep" => "|" } } }
+
+ it "uses separator in output" do
+ expect(output.size).to eq(1)
+ expect(output[0]).to eq("one|two\n")
  end
  end

- describe "can escape rogue values" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- add_field => ["foo","1+1", "baz", "=1+1"]
- count => 1
- }
- }
- output {
- csv {
- path => "#{tmpfile.path}"
- fields => ["foo", "baz"]
- }
- }
- CONFIG
+ describe "line seperator" do
+ let(:events) do
+ [ LogStash::Event.new("foo" => "one", "baz" => "two"),
+ LogStash::Event.new("foo" => "one", "baz" => "two") ]
+ end
+ let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"], "csv_options" => {"col_sep" => "|", "row_sep" => "\t" } } }

- agent do
- lines = CSV.read(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0][0]} == "1+1"
- insist {lines[0][1]} == "'=1+1"
+ it "uses separator in output" do
+ expect(output.size).to eq(1)
+ expect(output[0]).to eq("one|two\tone|two\t")
  end
  end

- describe "can turn off escaping rogue values" do
- tmpfile = Tempfile.new('logstash-spec-output-csv')
- config <<-CONFIG
- input {
- generator {
- add_field => ["foo","1+1", "baz", "=1+1"]
- count => 1
- }
+ context "with rogue values" do
+ let(:event_data) do
+ {
+ "f1" => "1+1",
+ "f2" => "=1+1",
+ "f3" => "+1+1",
+ "f4" => "-1+1",
+ "f5" => "@1+1"
  }
- output {
- csv {
- path => "#{tmpfile.path}"
- spreadsheet_safe => false
- fields => ["foo", "baz"]
- }
- }
- CONFIG
+ end
+ let(:events) do
+ [ LogStash::Event.new(event_data) ]
+ end

- agent do
- lines = CSV.read(tmpfile.path)
- insist {lines.count} == 1
- insist {lines[0][0]} == "1+1"
- insist {lines[0][1]} == "=1+1"
+ let(:options) { { "path" => tmpfile, "fields" => ["f1", "f2", "f3", "f4", "f5"] } }
+ it "escapes them correctly" do
+ expect(csv_output.size).to eq(1)
+ expect(csv_output[0][0]).to eq("1+1")
+ expect(csv_output[0][1]).to eq("'=1+1")
+ expect(csv_output[0][2]).to eq("'+1+1")
+ expect(csv_output[0][3]).to eq("'-1+1")
+ expect(csv_output[0][4]).to eq("'@1+1")
  end
- end

+ context "when escaping is turned off" do
+ let(:options) { super().merge("spreadsheet_safe" => false) }
+ it "doesn't escapes values" do
+ expect(csv_output.size).to eq(1)
+ expect(csv_output[0][0]).to eq("1+1")
+ expect(csv_output[0][1]).to eq("=1+1")
+ end
+ end
+ end
  end
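The rewritten specs above instantiate the output directly and push events through `multi_receive`, instead of running a generator/json pipeline; that is why `logstash-input-generator`, `logstash-filter-json`, and `insist` disappear from the gemspec and metadata below. A rough sketch of the same pattern outside RSpec, assuming a JRuby/Logstash environment where `logstash-core` and this plugin are loadable (the temp-file path is illustrative):

```ruby
# Sketch only: driving the plugin the way the rewritten specs do, assuming a
# JRuby/Logstash environment where logstash-core and this plugin are on the
# load path (the specs bootstrap this via logstash/devutils/rspec/spec_helper).
require "csv"
require "tempfile"
require "logstash/outputs/csv"

path   = Tempfile.new("csv-output-demo").path   # illustrative temp file
output = LogStash::Outputs::CSV.new("path" => path, "fields" => ["foo", "baz"])

output.register
output.multi_receive([LogStash::Event.new("foo" => "=1+1", "baz" => "quux")])

# Read back immediately, as the specs do; with the default spreadsheet_safe => true
# the leading "=" is escaped, so this prints [["'=1+1", "quux"]].
puts CSV.read(path).inspect
```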
metadata CHANGED
@@ -1,16 +1,16 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-csv
  version: !ruby/object:Gem::Version
- version: 3.0.9
+ version: 3.0.11
  platform: ruby
  authors:
  - Elastic
- autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-06-14 00:00:00.000000000 Z
+ date: 2025-10-10 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
+ name: logstash-core-plugin-api
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -19,9 +19,8 @@ dependencies:
  - - "<="
  - !ruby/object:Gem::Version
  version: '2.99'
- name: logstash-core-plugin-api
- prerelease: false
  type: :runtime
+ prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -31,70 +30,28 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '2.99'
  - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '0'
- name: logstash-input-generator
- prerelease: false
- type: :runtime
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '0'
  name: logstash-output-file
- prerelease: false
- type: :runtime
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
- name: logstash-filter-json
- prerelease: false
  type: :runtime
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '0'
- name: logstash-devutils
  prerelease: false
- type: :development
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
+ name: logstash-devutils
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
- name: insist
- prerelease: false
  type: :development
+ prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -124,7 +81,6 @@ licenses:
  metadata:
  logstash_plugin: 'true'
  logstash_group: output
- post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -139,8 +95,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.2.33
- signing_key:
+ rubygems_version: 3.6.3
  specification_version: 4
  summary: Writes events to disk in a delimited format
  test_files: