logstash-output-csv 3.0.9 → 3.0.10

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: b7a05140cdafe0cf40199b965734aec4da79a70513ee9f3d9085af089ce126dc
-  data.tar.gz: 047b1370413123253334129b37f504ba85f71a29e99e769dc333525e79471a0f
+  metadata.gz: 0ff16b853ea563deea07a1e78f6ad9b6f677c77bf3857a9c79efc5aba8951b60
+  data.tar.gz: 6e2285954e7e79beb4c2d7f539355e6a03f019cfcf659c420d795c4db4f6a2ed
 SHA512:
-  metadata.gz: a7c0372851b2b7ca7d6989f17846a6658636e439b6328d72182fb6da1ac9ea5875686c6d74aa8be928bc327a9f01408151944a98ade2d51e9fdf68dcc2ef1690
-  data.tar.gz: 9e7a658e67f5207c8d58ed8234ef61bcdd9c65171ad918df536753b59a91573f26e8fad18949c4bbce785a085a635d516e8eda9f4dfe4268b2b730e7028af3c3
+  metadata.gz: 617febffa7ab5a19acaf626e3979e7a54e2dd9901b19e4e627b1df37643ce79c5f1fecc2b81937e7fd217422c8f37d160a8be2201ff7e6c266de01f26c6c645c
+  data.tar.gz: 38d6ec6b6bb82b01cf9656f5532e7c1784118c439c9ee5d385d1bead7393a10024d1489fb500b30b4d70290d0a4b4c65afa66fa3b8f23b494f742c92613ac610
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+## 3.0.10
+ - Extend `spreadsheet_safe` prefix guard to '-', '+', and '@' [#27](https://github.com/logstash-plugins/logstash-output-csv/pull/27)
+
 ## 3.0.9
  - Fix: updates syntax to JRuby 9.4 [#25](https://github.com/logstash-plugins/logstash-output-csv/pull/25)
 
data/lib/logstash/outputs/csv.rb CHANGED
@@ -66,6 +66,6 @@ class LogStash::Outputs::CSV < LogStash::Outputs::File
 
   private
   def escape_csv(val)
-    (spreadsheet_safe && val.is_a?(String) && val.start_with?("=")) ? "'#{val}" : val
+    (spreadsheet_safe && val.is_a?(String) && val.start_with?(/[=+\-@]/)) ? "'#{val}" : val
   end
 end # class LogStash::Outputs::CSV
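The practical effect of this one-line change: with `spreadsheet_safe` enabled (the default), any string value beginning with `=`, `+`, `-`, or `@` is written with a leading apostrophe so spreadsheet applications treat it as text rather than a formula (CSV-injection hardening). A minimal standalone sketch of the guard's behavior, assuming `spreadsheet_safe` defaults to true as it does in the plugin:

```ruby
# Standalone sketch of the 3.0.10 prefix guard (not the plugin itself);
# the spreadsheet_safe keyword mirrors the plugin option and defaults to true.
def escape_csv(val, spreadsheet_safe: true)
  (spreadsheet_safe && val.is_a?(String) && val.start_with?(/[=+\-@]/)) ? "'#{val}" : val
end

escape_csv("=1+1")                            # => "'=1+1"
escape_csv("@cmd")                            # => "'@cmd"
escape_csv("-2+3")                            # => "'-2+3"
escape_csv("plain text")                      # => "plain text"
escape_csv("=1+1", spreadsheet_safe: false)   # => "=1+1"
```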
data/logstash-output-csv.gemspec CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-output-csv'
-  s.version = '3.0.9'
+  s.version = '3.0.10'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Writes events to disk in a delimited format"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -22,9 +22,6 @@ Gem::Specification.new do |s|
   # Gem dependencies
   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
 
-  s.add_runtime_dependency 'logstash-input-generator'
   s.add_runtime_dependency 'logstash-output-file'
-  s.add_runtime_dependency 'logstash-filter-json'
   s.add_development_dependency 'logstash-devutils'
-  s.add_development_dependency 'insist'
 end
data/spec/outputs/csv_spec.rb CHANGED
@@ -1,319 +1,171 @@
 require "csv"
 require "tempfile"
 require "logstash/devutils/rspec/spec_helper"
-require "insist"
 require "logstash/outputs/csv"
 
 describe LogStash::Outputs::CSV do
 
+  subject { described_class.new(options) }
 
-  describe "Write a single field to a csv file" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          add_field => ["foo","bar"]
-          count => 1
-        }
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => "foo"
-        }
-      }
-    CONFIG
+  let(:tmpfile) { Tempfile.new('logstash-spec-output-csv').path }
+  let(:output) { File.readlines(tmpfile) }
+  let(:csv_output) { CSV.read(tmpfile) }
 
-    agent do
-      lines = File.readlines(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0]} == "bar\n"
-    end
+  before(:each) do
+    subject.register
+    subject.multi_receive(events)
   end
 
-  describe "write multiple fields and lines to a csv file" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          add_field => ["foo", "bar", "baz", "quux"]
-          count => 2
-        }
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "baz"]
-        }
-      }
-    CONFIG
-
-    agent do
-      lines = File.readlines(tmpfile.path)
-      insist {lines.count} == 2
-      insist {lines[0]} == "bar,quux\n"
-      insist {lines[1]} == "bar,quux\n"
+  context "when configured with a single field" do
+    let(:events) { [ LogStash::Event.new("foo" => "bar") ] }
+    let(:options) { { "path" => tmpfile, "fields" => "foo" } }
+    it "writes a single field to a csv file" do
+      expect(output.count).to eq(1)
+      expect(output.first).to eq("bar\n")
     end
   end
 
-  describe "missing event fields are empty in csv" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          add_field => ["foo","bar", "baz", "quux"]
-          count => 1
-        }
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "not_there", "baz"]
-        }
-      }
-    CONFIG
-
-    agent do
-      lines = File.readlines(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0]} == "bar,,quux\n"
+  context "when receiving multiple events with multiple fields" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => "bar", "baz" => "quux"),
+        LogStash::Event.new("foo" => "bar", "baz" => "quux") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }
+    it "writes a line per event " do
+      expect(output.count).to eq(2)
+    end
+    it "writes configured fields for each line" do
+      expect(output[0]).to eq("bar,quux\n")
+      expect(output[1]).to eq("bar,quux\n")
    end
   end
 
-  describe "commas are quoted properly" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          add_field => ["foo","one,two", "baz", "quux"]
-          count => 1
-        }
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "baz"]
-        }
-      }
-    CONFIG
+  context "with missing event fields" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => "bar", "baz" => "quux") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["foo", "not_there", "baz"] } }
 
-    agent do
-      lines = File.readlines(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0]} == "\"one,two\",quux\n"
+    it "skips on the resulting line" do
+      expect(output.size).to eq(1)
+      expect(output[0]).to eq("bar,,quux\n")
     end
   end
 
-  describe "new lines are quoted properly" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          add_field => ["foo","one\ntwo", "baz", "quux"]
-          count => 1
-        }
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "baz"]
-        }
-      }
-    CONFIG
-
-    agent do
-      lines = CSV.read(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0][0]} == "one\ntwo"
+  context "when field values have commas" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => "one,two", "baz" => "quux") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }
+    it "correctly escapes them" do
+      expect(output.size).to eq(1)
+      expect(output[0]).to eq("\"one,two\",quux\n")
     end
   end
 
-  describe "fields that are are objects are written as JSON" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          message => '{"foo":{"one":"two"},"baz": "quux"}'
-          count => 1
-        }
-      }
-      filter {
-        json { source => "message"}
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "baz"]
-        }
-      }
-    CONFIG
-
-    agent do
-      lines = CSV.read(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0][0]} == '{"one":"two"}'
+  context "when fields contain special characters" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => 'one\ntwo', "baz" => "quux") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }
+    it "correctly escapes them" do
+      expect(csv_output.size).to eq(1)
+      expect(csv_output[0]).to eq(['one\ntwo', 'quux'])
     end
   end
 
-  describe "can address nested field using field reference syntax" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          message => '{"foo":{"one":"two"},"baz": "quux"}'
-          count => 1
-        }
-      }
-      filter {
-        json { source => "message"}
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["[foo][one]", "baz"]
-        }
-      }
-    CONFIG
+  context "fields that contain objects" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => {"one" => "two"}, "baz" => "quux") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"] } }
 
-    agent do
-      lines = CSV.read(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0][0]} == "two"
-      insist {lines[0][1]} == "quux"
+    it "are written as json" do
+      expect(csv_output.size).to eq(1)
+      expect(csv_output[0][0]).to eq('{"one":"two"}')
     end
   end
+  context "with address nested field" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => {"one" => "two"}, "baz" => "quux") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["[foo][one]", "baz"] } }
 
-  describe "missing nested field is blank" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          message => '{"foo":{"one":"two"},"baz": "quux"}'
-          count => 1
-        }
-      }
-      filter {
-        json { source => "message"}
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["[foo][missing]", "baz"]
-        }
-      }
-    CONFIG
-
-    agent do
-      lines = File.readlines(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0]} == ",quux\n"
+    it "are referenced using field references" do
+      expect(csv_output.size).to eq(1)
+      expect(csv_output[0][0]).to eq('two')
+      expect(csv_output[0][1]).to eq('quux')
     end
   end
 
-  describe "can choose field seperator" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          message => '{"foo":"one","bar": "two"}'
-          count => 1
-        }
-      }
-      filter {
-        json { source => "message"}
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "bar"]
-          csv_options => {"col_sep" => "|"}
-        }
-      }
-    CONFIG
+  context "missing nested field" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => {"one" => "two"}, "baz" => "quux") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["[foo][missing]", "baz"] } }
 
-    agent do
-      lines = File.readlines(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0]} == "one|two\n"
+    it "are blank" do
+      expect(output.size).to eq(1)
+      expect(output[0]).to eq(",quux\n")
     end
   end
-  describe "can choose line seperator" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          message => '{"foo":"one","bar": "two"}'
-          count => 2
-        }
-      }
-      filter {
-        json { source => "message"}
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "bar"]
-          csv_options => {"col_sep" => "|" "row_sep" => "\t"}
-        }
-      }
-    CONFIG
 
-    agent do
-      lines = File.readlines(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0]} == "one|two\tone|two\t"
+  describe "field separator" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => "one", "baz" => "two") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"], "csv_options" => {"col_sep" => "|" } } }
+
+    it "uses separator in output" do
+      expect(output.size).to eq(1)
+      expect(output[0]).to eq("one|two\n")
     end
   end
 
-  describe "can escape rogue values" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          add_field => ["foo","1+1", "baz", "=1+1"]
-          count => 1
-        }
-      }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          fields => ["foo", "baz"]
-        }
-      }
-    CONFIG
+  describe "line seperator" do
+    let(:events) do
+      [ LogStash::Event.new("foo" => "one", "baz" => "two"),
+        LogStash::Event.new("foo" => "one", "baz" => "two") ]
+    end
+    let(:options) { { "path" => tmpfile, "fields" => ["foo", "baz"], "csv_options" => {"col_sep" => "|", "row_sep" => "\t" } } }
 
-    agent do
-      lines = CSV.read(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0][0]} == "1+1"
-      insist {lines[0][1]} == "'=1+1"
+    it "uses separator in output" do
+      expect(output.size).to eq(1)
+      expect(output[0]).to eq("one|two\tone|two\t")
     end
   end
 
-  describe "can turn off escaping rogue values" do
-    tmpfile = Tempfile.new('logstash-spec-output-csv')
-    config <<-CONFIG
-      input {
-        generator {
-          add_field => ["foo","1+1", "baz", "=1+1"]
-          count => 1
-        }
+  context "with rogue values" do
+    let(:event_data) do
+      {
+        "f1" => "1+1",
+        "f2" => "=1+1",
+        "f3" => "+1+1",
+        "f4" => "-1+1",
+        "f5" => "@1+1"
       }
-      output {
-        csv {
-          path => "#{tmpfile.path}"
-          spreadsheet_safe => false
-          fields => ["foo", "baz"]
-        }
-      }
-    CONFIG
+    end
+    let(:events) do
+      [ LogStash::Event.new(event_data) ]
+    end
 
-    agent do
-      lines = CSV.read(tmpfile.path)
-      insist {lines.count} == 1
-      insist {lines[0][0]} == "1+1"
-      insist {lines[0][1]} == "=1+1"
+    let(:options) { { "path" => tmpfile, "fields" => ["f1", "f2", "f3", "f4", "f5"] } }
+    it "escapes them correctly" do
+      expect(csv_output.size).to eq(1)
+      expect(csv_output[0][0]).to eq("1+1")
+      expect(csv_output[0][1]).to eq("'=1+1")
+      expect(csv_output[0][2]).to eq("'+1+1")
+      expect(csv_output[0][3]).to eq("'-1+1")
+      expect(csv_output[0][4]).to eq("'@1+1")
     end
-  end
 
+    context "when escaping is turned off" do
+      let(:options) { super().merge("spreadsheet_safe" => false) }
+      it "doesn't escapes values" do
+        expect(csv_output.size).to eq(1)
+        expect(csv_output[0][0]).to eq("1+1")
+        expect(csv_output[0][1]).to eq("=1+1")
      end
    end
  end
 end
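The rewritten spec drives the plugin directly through its public API (`register` followed by `multi_receive`) instead of running a full generator/json pipeline, which is also how the output can be exercised in isolation. A minimal usage sketch, assuming a Logstash runtime with this plugin on the load path; the output path and field names are illustrative only:

```ruby
# Minimal usage sketch (assumes a Logstash runtime with logstash-output-csv
# available); the path and field names below are illustrative.
require "logstash/outputs/csv"

csv_out = LogStash::Outputs::CSV.new(
  "path"   => "/tmp/demo.csv",        # hypothetical output file
  "fields" => ["user", "formula"]     # spreadsheet_safe is left at its default (true)
)
csv_out.register
csv_out.multi_receive([
  LogStash::Event.new("user" => "alice", "formula" => "=1+1")
])
# With spreadsheet_safe enabled, the resulting line should read: alice,'=1+1
```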
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-csv
 version: !ruby/object:Gem::Version
-  version: 3.0.9
+  version: 3.0.10
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-06-14 00:00:00.000000000 Z
+date: 2023-12-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -30,20 +30,6 @@ dependencies:
     - - "<="
       - !ruby/object:Gem::Version
         version: '2.99'
-- !ruby/object:Gem::Dependency
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  name: logstash-input-generator
-  prerelease: false
-  type: :runtime
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -58,20 +44,6 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-- !ruby/object:Gem::Dependency
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  name: logstash-filter-json
-  prerelease: false
-  type: :runtime
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -86,20 +58,6 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-- !ruby/object:Gem::Dependency
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  name: insist
-  prerelease: false
-  type: :development
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
 description: This gem is a Logstash plugin required to be installed on top of the
   Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
   gem is not a stand-alone program