fluentd 1.2.5 → 1.2.6

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: a8c8b50f9093742b22d626d6051608b7d9cce711
4
- data.tar.gz: 3e3ad4d8f6af353c97d19856303e0f2ff082a7ac
3
+ metadata.gz: 8429bf1a0e35278c81b67e3d517568038a67fb2c
4
+ data.tar.gz: ba5260ae4f5db3933aebbf2ba598f0c03d9ab878
5
5
  SHA512:
6
- metadata.gz: becc9444e4bbd8205e4bc7dea1414a5e29d4bb95f84b5708b01c9008369bdbf4a563fbe40e6230c328371fd8765177e2db59fd11ee7bd900e3cc0a58903768d3
7
- data.tar.gz: 62dfa273cb9754d521ff4f652d8390d9315bf1a2db519a7bc590ec61a56ca320e0ab2576dddf3ae7ee133d94779cfc665f026c1571c1d88f38767c68843abced
6
+ metadata.gz: 575ca7960c2f0560d103001eecbbaf43763394c238280a4aebc4b8ab61336a069d29917c8dca25be95a2b78557c80e172e3dc0834ed8d165046bc87c822be426
7
+ data.tar.gz: be8459ae73c470d11848afc2f5cdc5e4f57a9a74606715e3491a7660e1fd4e362b899c5c7ccbe72504c74bcf1c4d818c6cbb00db37707529e6b30dd4d98c13cf
@@ -1,5 +1,23 @@
1
1
  # v1.2
2
2
 
3
+ ## Release v1.2.6 - 2018/10/03
4
+
5
+ ### Enhancements
6
+
7
+ * output: Add `disable_chunk_backup` to ignore broken chunks.
8
+ https://github.com/fluent/fluentd/pull/2117
9
+ * parser_syslog: Improve regexp for RFC5424
10
+ https://github.com/fluent/fluentd/pull/2141
11
+ * in_http: Allow specifying the wildcard '*' as the CORS domain
12
+ https://github.com/fluent/fluentd/pull/2139
13
+
14
+ ### Bug fixes
15
+
16
+ * in_tail: Prevent thread switching in the interval between seek and read/write operations to pos_file
17
+ https://github.com/fluent/fluentd/pull/2118
18
+ * parser: Handle LoadError properly for oj
19
+ https://github.com/fluent/fluentd/pull/2140
20
+
3
21
  ## Release v1.2.5 - 2018/08/22
4
22
 
5
23
  ### Bug fixes
@@ -375,7 +375,7 @@ module Fluent::Plugin
375
375
  # For every incoming request, we check if we have some CORS
376
376
  # restrictions and white listed origins through @cors_allow_origins.
377
377
  unless @cors_allow_origins.nil?
378
- unless @cors_allow_origins.include?(@origin)
378
+ unless @cors_allow_origins.include?('*') or @cors_allow_origins.include?(@origin)
379
379
  send_response_and_close("403 Forbidden", {'Connection' => 'close'}, "")
380
380
  return
381
381
  end
@@ -422,7 +422,14 @@ module Fluent::Plugin
422
422
  code, header, body = *@callback.call(path_info, params)
423
423
  body = body.to_s
424
424
 
425
- header['Access-Control-Allow-Origin'] = @origin if !@cors_allow_origins.nil? && @cors_allow_origins.include?(@origin)
425
+ unless @cors_allow_origins.nil?
426
+ if @cors_allow_origins.include?('*')
427
+ header['Access-Control-Allow-Origin'] = '*'
428
+ elsif @cors_allow_origins.include?(@origin)
429
+ header['Access-Control-Allow-Origin'] = @origin
430
+ end
431
+ end
432
+
426
433
  if @keep_alive
427
434
  header['Connection'] = 'Keep-Alive'
428
435
  send_response(code, header, body)
@@ -869,8 +869,9 @@ module Fluent::Plugin
869
869
  class PositionFile
870
870
  UNWATCHED_POSITION = 0xffffffffffffffff
871
871
 
872
- def initialize(file, map, last_pos)
872
+ def initialize(file, file_mutex, map, last_pos)
873
873
  @file = file
874
+ @file_mutex = file_mutex
874
875
  @map = map
875
876
  @last_pos = last_pos
876
877
  end
@@ -880,31 +881,34 @@ module Fluent::Plugin
880
881
  return m
881
882
  end
882
883
 
883
- @file.pos = @last_pos
884
- @file.write path
885
- @file.write "\t"
886
- seek = @file.pos
887
- @file.write "0000000000000000\t0000000000000000\n"
888
- @last_pos = @file.pos
889
-
890
- @map[path] = FilePositionEntry.new(@file, seek, 0, 0)
884
+ @file_mutex.synchronize {
885
+ @file.pos = @last_pos
886
+ @file.write "#{path}\t0000000000000000\t0000000000000000\n"
887
+ seek = @last_pos + path.bytesize + 1
888
+ @last_pos = @file.pos
889
+ @map[path] = FilePositionEntry.new(@file, @file_mutex, seek, 0, 0)
890
+ }
891
891
  end
892
892
 
893
893
  def self.parse(file)
894
894
  compact(file)
895
895
 
896
+ file_mutex = Mutex.new
896
897
  map = {}
897
898
  file.pos = 0
898
899
  file.each_line {|line|
899
900
  m = /^([^\t]+)\t([0-9a-fA-F]+)\t([0-9a-fA-F]+)/.match(line)
900
- next unless m
901
+ unless m
902
+ $log.warn "Unparsable line in pos_file: #{line}"
903
+ next
904
+ end
901
905
  path = m[1]
902
906
  pos = m[2].to_i(16)
903
907
  ino = m[3].to_i(16)
904
908
  seek = file.pos - line.bytesize + path.bytesize + 1
905
- map[path] = FilePositionEntry.new(file, seek, pos, ino)
909
+ map[path] = FilePositionEntry.new(file, file_mutex, seek, pos, ino)
906
910
  }
907
- new(file, map, file.pos)
911
+ new(file, file_mutex, map, file.pos)
908
912
  end
909
913
 
910
914
  # Clean up unwatched file entries
@@ -912,7 +916,10 @@ module Fluent::Plugin
912
916
  file.pos = 0
913
917
  existent_entries = file.each_line.map { |line|
914
918
  m = /^([^\t]+)\t([0-9a-fA-F]+)\t([0-9a-fA-F]+)/.match(line)
915
- next unless m
919
+ unless m
920
+ $log.warn "Unparsable line in pos_file: #{line}"
921
+ next
922
+ end
916
923
  path = m[1]
917
924
  pos = m[2].to_i(16)
918
925
  ino = m[3].to_i(16)
@@ -935,23 +942,28 @@ module Fluent::Plugin
935
942
  LN_OFFSET = 33
936
943
  SIZE = 34
937
944
 
938
- def initialize(file, seek, pos, inode)
945
+ def initialize(file, file_mutex, seek, pos, inode)
939
946
  @file = file
947
+ @file_mutex = file_mutex
940
948
  @seek = seek
941
949
  @pos = pos
942
950
  @inode = inode
943
951
  end
944
952
 
945
953
  def update(ino, pos)
946
- @file.pos = @seek
947
- @file.write "%016x\t%016x" % [pos, ino]
954
+ @file_mutex.synchronize {
955
+ @file.pos = @seek
956
+ @file.write "%016x\t%016x" % [pos, ino]
957
+ }
948
958
  @pos = pos
949
959
  @inode = ino
950
960
  end
951
961
 
952
962
  def update_pos(pos)
953
- @file.pos = @seek
954
- @file.write "%016x" % pos
963
+ @file_mutex.synchronize {
964
+ @file.pos = @seek
965
+ @file.write "%016x" % pos
966
+ }
955
967
  @pos = pos
956
968
  end
957
969
 
@@ -96,6 +96,7 @@ module Fluent
96
96
  config_param :retry_max_interval, :time, default: nil, desc: 'The maximum interval seconds for exponential backoff between retries while failing.'
97
97
 
98
98
  config_param :retry_randomize, :bool, default: true, desc: 'If true, output plugin will retry after randomized interval not to do burst retries.'
99
+ config_param :disable_chunk_backup, :bool, default: false, desc: 'If true, chunks are thrown away when unrecoverable error happens'
99
100
  end
100
101
 
101
102
  config_section :secondary, param_name: :secondary_config, required: false, multi: false, final: true do
@@ -1161,17 +1162,21 @@ module Fluent
1161
1162
  end
1162
1163
 
1163
1164
  def backup_chunk(chunk, using_secondary, delayed_commit)
1164
- unique_id = dump_unique_id_hex(chunk.unique_id)
1165
- safe_plugin_id = plugin_id.gsub(/[ "\/\\:;|*<>?]/, '_')
1166
- backup_base_dir = system_config.root_dir || DEFAULT_BACKUP_DIR
1167
- backup_file = File.join(backup_base_dir, 'backup', "worker#{fluentd_worker_id}", safe_plugin_id, "#{unique_id}.log")
1168
- backup_dir = File.dirname(backup_file)
1169
-
1170
- log.warn "bad chunk is moved to #{backup_file}"
1171
- FileUtils.mkdir_p(backup_dir) unless Dir.exist?(backup_dir)
1172
- File.open(backup_file, 'ab', system_config.file_permission || 0644) { |f|
1173
- chunk.write_to(f)
1174
- }
1165
+ if @buffer_config.disable_chunk_backup
1166
+ log.warn "disable_chunk_backup is true. #{dump_unique_id_hex(chunk.unique_id)} chunk is thrown away"
1167
+ else
1168
+ unique_id = dump_unique_id_hex(chunk.unique_id)
1169
+ safe_plugin_id = plugin_id.gsub(/[ "\/\\:;|*<>?]/, '_')
1170
+ backup_base_dir = system_config.root_dir || DEFAULT_BACKUP_DIR
1171
+ backup_file = File.join(backup_base_dir, 'backup', "worker#{fluentd_worker_id}", safe_plugin_id, "#{unique_id}.log")
1172
+ backup_dir = File.dirname(backup_file)
1173
+
1174
+ log.warn "bad chunk is moved to #{backup_file}"
1175
+ FileUtils.mkdir_p(backup_dir) unless Dir.exist?(backup_dir)
1176
+ File.open(backup_file, 'ab', system_config.file_permission || 0644) { |f|
1177
+ chunk.write_to(f)
1178
+ }
1179
+ end
1175
1180
  commit_write(chunk.unique_id, secondary: using_secondary, delayed: delayed_commit)
1176
1181
  end
1177
1182
 
@@ -52,9 +52,15 @@ module Fluent
52
52
  else
53
53
  raise "BUG: unknown json parser specified: #{name}"
54
54
  end
55
- rescue LoadError
55
+ rescue LoadError => ex
56
56
  name = :yajl
57
- log.info "Oj is not installed, and failing back to Yajl for json parser" if log
57
+ if log
58
+ if /\boj\z/ =~ ex.message
59
+ log.info "Oj is not installed, and failing back to Yajl for json parser"
60
+ else
61
+ log.warn ex.message
62
+ end
63
+ end
58
64
  retry
59
65
  end
60
66
 
@@ -27,8 +27,8 @@ module Fluent
27
27
  REGEXP = /^(?<time>[^ ]*\s*[^ ]* [^ ]*) (?<host>[^ ]*) (?<ident>[^ :\[]*)(?:\[(?<pid>[0-9]+)\])?(?:[^\:]*\:)? *(?<message>.*)$/
28
28
  # From in_syslog default pattern
29
29
  REGEXP_WITH_PRI = /^\<(?<pri>[0-9]+)\>(?<time>[^ ]* {1,2}[^ ]* [^ ]*) (?<host>[^ ]*) (?<ident>[^ :\[]*)(?:\[(?<pid>[0-9]+)\])?(?:[^\:]*\:)? *(?<message>.*)$/
30
- REGEXP_RFC5424 = /\A^(?<time>[^ ]+) (?<host>[^ ]+) (?<ident>[^ ]+) (?<pid>.{1,128}) (?<msgid>[^ ]+) (?<extradata>(\[(.*)\]|[^ ])) (?<message>.+)$\z/
31
- REGEXP_RFC5424_WITH_PRI = /\A^\<(?<pri>[0-9]{1,3})\>[1-9]\d{0,2} (?<time>[^ ]+) (?<host>[^ ]+) (?<ident>[^ ]+) (?<pid>.{1,128}) (?<msgid>[^ ]+) (?<extradata>(\[(.*)\]|[^ ])) (?<message>.+)$\z/
30
+ REGEXP_RFC5424 = /\A^(?<time>[^ ]+) (?<host>[!-~]{1,255}) (?<ident>[!-~]{1,48}) (?<pid>[!-~]{1,128}) (?<msgid>[!-~]{1,32}) (?<extradata>(?:\-|\[(.*)\]))(?: (?<message>.+))?$\z/
31
+ REGEXP_RFC5424_WITH_PRI = /\A^\<(?<pri>[0-9]{1,3})\>[1-9]\d{0,2} (?<time>[^ ]+) (?<host>[!-~]{1,255}) (?<ident>[!-~]{1,48}) (?<pid>[!-~]{1,128}) (?<msgid>[!-~]{1,32}) (?<extradata>(?:\-|\[(.*)\]))(?: (?<message>.+))?$\z/
32
32
  REGEXP_DETECT_RFC5424 = /^\<.*\>[1-9]\d{0,2}/
33
33
 
34
34
  config_set_default :time_format, "%b %d %H:%M:%S"
@@ -16,6 +16,6 @@
16
16
 
17
17
  module Fluent
18
18
 
19
- VERSION = '1.2.5'
19
+ VERSION = '1.2.6'
20
20
 
21
21
  end
@@ -155,6 +155,7 @@ slow_flush_log_threshold: float: (20.0)
155
155
  retry_exponential_backoff_base: float: (2)
156
156
  retry_max_interval: time: (nil)
157
157
  retry_randomize: bool: (true)
158
+ disable_chunk_backup: bool: (false)
158
159
  <secondary>: optional, single
159
160
  @type: string: (nil)
160
161
  <buffer>: optional, single
@@ -604,6 +604,26 @@ class HttpInputTest < Test::Unit::TestCase
604
604
  assert_equal_event_time time, d.events[1][1]
605
605
  end
606
606
 
607
+ def test_cors_allowed_wildcard
608
+ d = create_driver(CONFIG + 'cors_allow_origins ["*"]')
609
+
610
+ time = event_time("2011-01-02 13:14:15 UTC")
611
+ events = [
612
+ ["tag1", time, {"a"=>1}],
613
+ ]
614
+
615
+ d.run do
616
+ events.each do |tag, time, record|
617
+ headers = {"Origin" => "http://foo.com"}
618
+
619
+ res = post("/#{tag}", {"json" => record.to_json, "time" => time.to_i}, headers)
620
+
621
+ assert_equal "200", res.code
622
+ assert_equal "*", res["Access-Control-Allow-Origin"]
623
+ end
624
+ end
625
+ end
626
+
607
627
  def test_content_encoding_gzip
608
628
  d = create_driver
609
629
 
@@ -279,5 +279,29 @@ class BufferedOutputBackupTest < Test::Unit::TestCase
279
279
  assert { logs.any? { |l| l.include?("got unrecoverable error in primary and secondary is async output") } }
280
280
  end
281
281
  end
282
+
283
+ test 'chunk is thrown away when disable_chunk_backup is true' do
284
+ Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR) do
285
+ id = 'backup_test'
286
+ hash = {
287
+ 'flush_interval' => 1,
288
+ 'flush_thread_burst_interval' => 0.1,
289
+ 'disable_chunk_backup' => true
290
+ }
291
+ chunk_id = nil
292
+ @i.configure(config_element('ROOT', '', {'@id' => id}, [config_element('buffer', 'tag', hash)]))
293
+ @i.register(:write) { |chunk|
294
+ chunk_id = chunk.unique_id;
295
+ raise Fluent::UnrecoverableError, "yay, your #write must fail"
296
+ }
297
+
298
+ flush_chunks
299
+
300
+ target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
301
+ assert_false File.exist?(target)
302
+ logs = @i.log.out.logs
303
+ assert { logs.any? { |l| l.include?("disable_chunk_backup is true") } }
304
+ end
305
+ end
282
306
  end
283
307
  end
@@ -118,6 +118,23 @@ class SyslogParserTest < ::Test::Unit::TestCase
118
118
  @parser.instance.patterns['format'])
119
119
  end
120
120
 
121
+ def test_parse_with_rfc5424_empty_message_and_without_priority
122
+ @parser.configure(
123
+ 'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
124
+ 'message_format' => 'rfc5424',
125
+ )
126
+ text = '2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - -'
127
+ @parser.instance.parse(text) do |time, record|
128
+ assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
129
+ assert_equal "-", record["pid"]
130
+ assert_equal "-", record["msgid"]
131
+ assert_equal "-", record["extradata"]
132
+ assert_nil record["message"]
133
+ end
134
+ assert_equal(Fluent::Plugin::SyslogParser::REGEXP_RFC5424,
135
+ @parser.instance.patterns['format'])
136
+ end
137
+
121
138
  def test_parse_with_rfc5424_message_without_time_format
122
139
  @parser.configure(
123
140
  'message_format' => 'rfc5424',
@@ -133,6 +150,21 @@ class SyslogParserTest < ::Test::Unit::TestCase
133
150
  end
134
151
  end
135
152
 
153
+ def test_parse_with_rfc5424_message_with_priority_and_pid
154
+ @parser.configure(
155
+ 'message_format' => 'rfc5424',
156
+ 'with_priority' => true,
157
+ )
158
+ text = '<28>1 2018-09-26T15:54:26.620412+09:00 machine minissdpd 1298 - - peer 192.168.0.5:50123 is not from a LAN'
159
+ @parser.instance.parse(text) do |time, record|
160
+ assert_equal(event_time("2018-09-26T15:54:26.620412+0900", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
161
+ assert_equal "1298", record["pid"]
162
+ assert_equal "-", record["msgid"]
163
+ assert_equal "-", record["extradata"]
164
+ assert_equal " peer 192.168.0.5:50123 is not from a LAN", record["message"]
165
+ end
166
+ end
167
+
136
168
  def test_parse_with_rfc5424_structured_message
137
169
  @parser.configure(
138
170
  'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
@@ -142,13 +174,64 @@ class SyslogParserTest < ::Test::Unit::TestCase
142
174
  text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] Hi, from Fluentd!'
143
175
  @parser.instance.parse(text) do |time, record|
144
176
  assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
145
- assert_equal "11111", record["pid"]
177
+ assert_equal "11111", record["pid"]
146
178
  assert_equal "ID24224", record["msgid"]
147
179
  assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
148
180
  record["extradata"]
149
181
  assert_equal "Hi, from Fluentd!", record["message"]
150
182
  end
151
183
  end
184
+
185
+ def test_parse_with_rfc5424_multiple_structured_message
186
+ @parser.configure(
187
+ 'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
188
+ 'message_format' => 'rfc5424',
189
+ 'with_priority' => true,
190
+ )
191
+ text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"][exampleSDID@20224 class="high"] Hi, from Fluentd!'
192
+ @parser.instance.parse(text) do |time, record|
193
+ assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
194
+ assert_equal "11111", record["pid"]
195
+ assert_equal "ID24224", record["msgid"]
196
+ assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"][exampleSDID@20224 class=\"high\"]",
197
+ record["extradata"]
198
+ assert_equal "Hi, from Fluentd!", record["message"]
199
+ end
200
+ end
201
+
202
+ def test_parse_with_rfc5424_message_includes_right_bracket
203
+ @parser.configure(
204
+ 'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
205
+ 'message_format' => 'rfc5424',
206
+ 'with_priority' => true,
207
+ )
208
+ text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] Hi, from Fluentd]!'
209
+ @parser.instance.parse(text) do |time, record|
210
+ assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
211
+ assert_equal "11111", record["pid"]
212
+ assert_equal "ID24224", record["msgid"]
213
+ assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
214
+ record["extradata"]
215
+ assert_equal "Hi, from Fluentd]!", record["message"]
216
+ end
217
+ end
218
+
219
+ def test_parse_with_rfc5424_empty_message
220
+ @parser.configure(
221
+ 'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
222
+ 'message_format' => 'rfc5424',
223
+ 'with_priority' => true,
224
+ )
225
+ text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"]'
226
+ @parser.instance.parse(text) do |time, record|
227
+ assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
228
+ assert_equal "11111", record["pid"]
229
+ assert_equal "ID24224", record["msgid"]
230
+ assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
231
+ record["extradata"]
232
+ assert_nil record["message"]
233
+ end
234
+ end
152
235
  end
153
236
 
154
237
  class TestAutoRegexp < self
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluentd
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.2.5
4
+ version: 1.2.6
5
5
  platform: ruby
6
6
  authors:
7
7
  - Sadayuki Furuhashi
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2018-08-23 00:00:00.000000000 Z
11
+ date: 2018-10-04 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: msgpack