fluentd 1.11.0-x64-mingw32 → 1.11.5-x64-mingw32

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic; see the release details below.

Files changed (78)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +92 -1
  3. data/example/copy_roundrobin.conf +3 -3
  4. data/example/counter.conf +1 -1
  5. data/example/filter_stdout.conf +2 -2
  6. data/example/{in_dummy_blocks.conf → in_sample_blocks.conf} +4 -4
  7. data/example/{in_dummy_with_compression.conf → in_sample_with_compression.conf} +3 -3
  8. data/example/logevents.conf +5 -5
  9. data/example/multi_filters.conf +1 -1
  10. data/example/out_exec_filter.conf +2 -2
  11. data/example/out_forward.conf +1 -1
  12. data/example/out_forward_buf_file.conf +1 -1
  13. data/example/out_forward_client.conf +5 -5
  14. data/example/out_forward_heartbeat_none.conf +1 -1
  15. data/example/out_forward_sd.conf +1 -1
  16. data/example/out_forward_shared_key.conf +2 -2
  17. data/example/out_forward_tls.conf +1 -1
  18. data/example/out_forward_users.conf +3 -3
  19. data/example/out_null.conf +4 -4
  20. data/example/secondary_file.conf +1 -1
  21. data/fluentd.gemspec +6 -6
  22. data/lib/fluent/command/fluentd.rb +11 -0
  23. data/lib/fluent/log.rb +33 -3
  24. data/lib/fluent/match.rb +9 -0
  25. data/lib/fluent/plugin/buffer.rb +49 -40
  26. data/lib/fluent/plugin/buffer/chunk.rb +2 -1
  27. data/lib/fluent/plugin/formatter.rb +24 -0
  28. data/lib/fluent/plugin/formatter_hash.rb +3 -1
  29. data/lib/fluent/plugin/formatter_json.rb +3 -1
  30. data/lib/fluent/plugin/formatter_ltsv.rb +3 -1
  31. data/lib/fluent/plugin/formatter_out_file.rb +3 -1
  32. data/lib/fluent/plugin/formatter_single_value.rb +3 -1
  33. data/lib/fluent/plugin/formatter_tsv.rb +3 -1
  34. data/lib/fluent/plugin/in_dummy.rb +2 -123
  35. data/lib/fluent/plugin/in_exec.rb +4 -2
  36. data/lib/fluent/plugin/in_http.rb +148 -77
  37. data/lib/fluent/plugin/in_sample.rb +141 -0
  38. data/lib/fluent/plugin/in_tail.rb +2 -2
  39. data/lib/fluent/plugin/out_http.rb +20 -2
  40. data/lib/fluent/plugin/output.rb +8 -5
  41. data/lib/fluent/plugin/parser_json.rb +5 -2
  42. data/lib/fluent/plugin_helper/cert_option.rb +5 -8
  43. data/lib/fluent/plugin_helper/child_process.rb +3 -2
  44. data/lib/fluent/plugin_helper/inject.rb +2 -1
  45. data/lib/fluent/plugin_helper/socket.rb +1 -1
  46. data/lib/fluent/supervisor.rb +11 -6
  47. data/lib/fluent/system_config.rb +2 -1
  48. data/lib/fluent/version.rb +1 -1
  49. data/test/command/test_binlog_reader.rb +22 -6
  50. data/test/plugin/test_buffer.rb +4 -0
  51. data/test/plugin/test_filter_stdout.rb +6 -1
  52. data/test/plugin/test_formatter_hash.rb +6 -3
  53. data/test/plugin/test_formatter_json.rb +14 -4
  54. data/test/plugin/test_formatter_ltsv.rb +13 -5
  55. data/test/plugin/test_formatter_out_file.rb +35 -14
  56. data/test/plugin/test_formatter_single_value.rb +12 -6
  57. data/test/plugin/test_formatter_tsv.rb +12 -4
  58. data/test/plugin/test_in_exec.rb +18 -0
  59. data/test/plugin/test_in_http.rb +57 -0
  60. data/test/plugin/{test_in_dummy.rb → test_in_sample.rb} +25 -25
  61. data/test/plugin/test_in_tail.rb +3 -0
  62. data/test/plugin/test_out_file.rb +23 -18
  63. data/test/plugin/test_output.rb +12 -0
  64. data/test/plugin_helper/data/cert/empty.pem +0 -0
  65. data/test/plugin_helper/test_cert_option.rb +7 -0
  66. data/test/plugin_helper/test_child_process.rb +15 -0
  67. data/test/plugin_helper/test_compat_parameters.rb +7 -2
  68. data/test/plugin_helper/test_http_server_helper.rb +5 -0
  69. data/test/plugin_helper/test_inject.rb +13 -0
  70. data/test/plugin_helper/test_server.rb +34 -0
  71. data/test/plugin_helper/test_socket.rb +8 -0
  72. data/test/test_formatter.rb +34 -10
  73. data/test/test_log.rb +44 -0
  74. data/test/test_match.rb +11 -0
  75. data/test/test_output.rb +6 -1
  76. data/test/test_static_config_analysis.rb +2 -2
  77. data/test/test_supervisor.rb +26 -0
  78. metadata +21 -18
@@ -33,6 +33,15 @@ module Fluent
33
33
 
34
34
  class GlobMatchPattern < MatchPattern
35
35
  def initialize(pat)
36
+ if pat.start_with?('/')
37
+ if pat.end_with?('/')
38
+ @regex = Regexp.new("\\A"+pat[1..-2]+"\\Z")
39
+ return
40
+ else
41
+ raise Fluent::ConfigError, "invalid match - regex"
42
+ end
43
+ end
44
+
36
45
  stack = []
37
46
  regex = ['']
38
47
  escape = false
@@ -143,17 +143,33 @@ module Fluent
143
143
  end
144
144
  end
145
145
 
146
+ # timekey should be unixtime as usual.
147
+ # So, unixtime should be bigger than 2^30 - 1 (= 1073741823) nowadays.
148
+ # We should check object_id stability to use object_id as optimization for comparing operations.
149
+ # e.g.)
150
+ # irb> Time.parse("2020/07/31 18:30:00+09:00").to_i
151
+ # => 1596187800
152
+ # irb> Time.parse("2020/07/31 18:30:00+09:00").to_i > 2**30 -1
153
+ # => true
154
+ def self.enable_optimize?
155
+ a1 = 2**30 - 1
156
+ a2 = 2**30 - 1
157
+ b1 = 2**62 - 1
158
+ b2 = 2**62 - 1
159
+ (a1.object_id == a2.object_id) && (b1.object_id == b2.object_id)
160
+ end
161
+
146
162
  # This is an optimization code. Current Struct's implementation is comparing all data.
147
163
  # https://github.com/ruby/ruby/blob/0623e2b7cc621b1733a760b72af246b06c30cf96/struct.c#L1200-L1203
148
164
  # Actually this overhead is very small but this class is generated *per chunk* (and used in hash object).
149
165
  # This means that this class is one of the most called object in Fluentd.
150
166
  # See https://github.com/fluent/fluentd/pull/2560
151
- # But, this optimization has a side effect on Windows due to differing object_id.
167
+ # But, this optimization has a side effect on Windows and 32bit environment(s) due to differing object_id.
152
168
  # This difference causes flood of buffer files.
153
- # So, this optimization should be enabled on non-Windows platform.
169
+ # So, this optimization should be enabled on `enable_optimize?` as true platforms.
154
170
  def hash
155
171
  timekey.object_id
156
- end unless Fluent.windows?
172
+ end if enable_optimize?
157
173
  end
158
174
 
159
175
  # for tests
@@ -176,6 +192,7 @@ module Fluent
176
192
 
177
193
  @stage_size = @queue_size = 0
178
194
  @timekeys = Hash.new(0)
195
+ @enable_update_timekeys = false
179
196
  @mutex = Mutex.new
180
197
  end
181
198
 
@@ -191,24 +208,23 @@ module Fluent
191
208
  end
192
209
  end
193
210
 
211
+ def enable_update_timekeys
212
+ @enable_update_timekeys = true
213
+ end
214
+
194
215
  def start
195
216
  super
196
217
 
197
218
  @stage, @queue = resume
198
219
  @stage.each_pair do |metadata, chunk|
199
220
  @stage_size += chunk.bytesize
200
- if chunk.metadata && chunk.metadata.timekey
201
- add_timekey(metadata.timekey)
202
- end
203
221
  end
204
222
  @queue.each do |chunk|
205
223
  @queued_num[chunk.metadata] ||= 0
206
224
  @queued_num[chunk.metadata] += 1
207
225
  @queue_size += chunk.bytesize
208
- if chunk.metadata && chunk.metadata.timekey
209
- add_timekey(chunk.metadata.timekey)
210
- end
211
226
  end
227
+ update_timekeys
212
228
  log.debug "buffer started", instance: self.object_id, stage_size: @stage_size, queue_size: @queue_size
213
229
  end
214
230
 
@@ -257,12 +273,9 @@ module Fluent
257
273
  Metadata.new(timekey, tag, variables)
258
274
  end
259
275
 
276
+ # Keep this method for existing code
260
277
  def metadata(timekey: nil, tag: nil, variables: nil)
261
- meta = Metadata.new(timekey, tag, variables)
262
- if (t = meta.timekey)
263
- add_timekey(t)
264
- end
265
- meta
278
+ Metadata.new(timekey, tag, variables)
266
279
  end
267
280
 
268
281
  def timekeys
@@ -456,9 +469,23 @@ module Fluent
456
469
  end
457
470
  end
458
471
 
472
+ def update_timekeys
473
+ synchronize do
474
+ chunks = @stage.values
475
+ chunks.concat(@queue)
476
+ @timekeys = chunks.each_with_object({}) do |chunk, keys|
477
+ if chunk.metadata && chunk.metadata.timekey
478
+ t = chunk.metadata.timekey
479
+ keys[t] = keys.fetch(t, 0) + 1
480
+ end
481
+ end
482
+ end
483
+ end
484
+
459
485
  # At flush_at_shutdown, all staged chunks should be enqueued for buffer flush. Set true to force_enqueue for it.
460
486
  def enqueue_all(force_enqueue = false)
461
487
  log.on_trace { log.trace "enqueueing all chunks in buffer", instance: self.object_id }
488
+ update_timekeys if @enable_update_timekeys
462
489
 
463
490
  if block_given?
464
491
  synchronize{ @stage.keys }.each do |metadata|
@@ -537,10 +564,6 @@ module Fluent
537
564
  log.trace "chunk purged", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata
538
565
  end
539
566
 
540
- if metadata && metadata.timekey
541
- del_timekey(metadata.timekey)
542
- end
543
-
544
567
  nil
545
568
  end
546
569
 
@@ -604,7 +627,11 @@ module Fluent
604
627
 
605
628
  if chunk_size_over?(chunk)
606
629
  if format && empty_chunk
607
- log.warn "chunk bytes limit exceeds for an emitted event stream: #{adding_bytesize}bytes"
630
+ if chunk.bytesize > @chunk_limit_size
631
+ log.warn "chunk bytes limit exceeds for an emitted event stream: #{adding_bytesize}bytes"
632
+ else
633
+ log.warn "chunk size limit exceeds for an emitted event stream: #{chunk.size}records"
634
+ end
608
635
  end
609
636
  chunk.rollback
610
637
 
@@ -768,11 +795,11 @@ module Fluent
768
795
  'total_queued_size' => stage_size + queue_size,
769
796
  }
770
797
 
771
- if (m = timekeys.min)
798
+ tkeys = timekeys
799
+ if (m = tkeys.min)
772
800
  stats['oldest_timekey'] = m
773
801
  end
774
-
775
- if (m = timekeys.max)
802
+ if (m = tkeys.max)
776
803
  stats['newest_timekey'] = m
777
804
  end
778
805
 
@@ -789,24 +816,6 @@ module Fluent
789
816
  !@queue.empty?
790
817
  end
791
818
  end
792
-
793
- def add_timekey(t)
794
- @mutex.synchronize do
795
- @timekeys[t] += 1
796
- end
797
- nil
798
- end
799
-
800
- def del_timekey(t)
801
- @mutex.synchronize do
802
- if @timekeys[t] <= 1
803
- @timekeys.delete(t)
804
- else
805
- @timekeys[t] -= 1
806
- end
807
- end
808
- nil
809
- end
810
819
  end
811
820
  end
812
821
  end
@@ -206,8 +206,9 @@ module Fluent
206
206
  output_io = if chunk_io.is_a?(StringIO)
207
207
  StringIO.new
208
208
  else
209
- Tempfile.new('decompressed-data').binmode
209
+ Tempfile.new('decompressed-data')
210
210
  end
211
+ output_io.binmode if output_io.is_a?(Tempfile)
211
212
  decompress(input_io: chunk_io, output_io: output_io)
212
213
  output_io.seek(0, IO::SEEK_SET)
213
214
  yield output_io
@@ -46,5 +46,29 @@ module Fluent
46
46
  @proc.call(tag, time, record)
47
47
  end
48
48
  end
49
+
50
+ module Newline
51
+ module Mixin
52
+ include Fluent::Configurable
53
+
54
+ DEFAULT_NEWLINE = if Fluent.windows?
55
+ :crlf
56
+ else
57
+ :lf
58
+ end
59
+
60
+ config_param :newline, :enum, list: [:lf, :crlf], default: DEFAULT_NEWLINE
61
+
62
+ def configure(conf)
63
+ super
64
+ @newline = case newline
65
+ when :lf
66
+ "\n"
67
+ when :crlf
68
+ "\r\n"
69
+ end
70
+ end
71
+ end
72
+ end
49
73
  end
50
74
  end
@@ -19,13 +19,15 @@ require 'fluent/plugin/formatter'
19
19
  module Fluent
20
20
  module Plugin
21
21
  class HashFormatter < Formatter
22
+ include Fluent::Plugin::Newline::Mixin
23
+
22
24
  Plugin.register_formatter('hash', self)
23
25
 
24
26
  config_param :add_newline, :bool, default: true
25
27
 
26
28
  def format(tag, time, record)
27
29
  line = record.to_s
28
- line << "\n".freeze if @add_newline
30
+ line << @newline.freeze if @add_newline
29
31
  line
30
32
  end
31
33
  end
@@ -20,6 +20,8 @@ require 'fluent/env'
20
20
  module Fluent
21
21
  module Plugin
22
22
  class JSONFormatter < Formatter
23
+ include Fluent::Plugin::Newline::Mixin
24
+
23
25
  Plugin.register_formatter('json', self)
24
26
 
25
27
  config_param :json_parser, :string, default: 'oj'
@@ -44,7 +46,7 @@ module Fluent
44
46
  end
45
47
 
46
48
  def format(tag, time, record)
47
- "#{@dump_proc.call(record)}\n"
49
+ "#{@dump_proc.call(record)}#{@newline}"
48
50
  end
49
51
 
50
52
  def format_without_nl(tag, time, record)
@@ -19,6 +19,8 @@ require 'fluent/plugin/formatter'
19
19
  module Fluent
20
20
  module Plugin
21
21
  class LabeledTSVFormatter < Formatter
22
+ include Fluent::Plugin::Newline::Mixin
23
+
22
24
  Plugin.register_formatter('ltsv', self)
23
25
 
24
26
  # http://ltsv.org/
@@ -34,7 +36,7 @@ module Fluent
34
36
  formatted << @delimiter if formatted.length.nonzero?
35
37
  formatted << "#{label}#{@label_delimiter}#{value}"
36
38
  end
37
- formatted << "\n".freeze if @add_newline
39
+ formatted << @newline.freeze if @add_newline
38
40
  formatted
39
41
  end
40
42
  end
@@ -21,6 +21,8 @@ require 'yajl'
21
21
  module Fluent
22
22
  module Plugin
23
23
  class OutFileFormatter < Formatter
24
+ include Fluent::Plugin::Newline::Mixin
25
+
24
26
  Plugin.register_formatter('out_file', self)
25
27
 
26
28
  config_param :output_time, :bool, default: true
@@ -44,7 +46,7 @@ module Fluent
44
46
  header = ''
45
47
  header << "#{@timef.format(time)}#{@delimiter}" if @output_time
46
48
  header << "#{tag}#{@delimiter}" if @output_tag
47
- "#{header}#{Yajl.dump(record)}\n"
49
+ "#{header}#{Yajl.dump(record)}#{@newline}"
48
50
  end
49
51
  end
50
52
  end
@@ -19,6 +19,8 @@ require 'fluent/plugin/formatter'
19
19
  module Fluent
20
20
  module Plugin
21
21
  class SingleValueFormatter < Formatter
22
+ include Fluent::Plugin::Newline::Mixin
23
+
22
24
  Plugin.register_formatter('single_value', self)
23
25
 
24
26
  config_param :message_key, :string, default: 'message'
@@ -26,7 +28,7 @@ module Fluent
26
28
 
27
29
  def format(tag, time, record)
28
30
  text = record[@message_key].to_s.dup
29
- text << "\n" if @add_newline
31
+ text << @newline.freeze if @add_newline
30
32
  text
31
33
  end
32
34
  end
@@ -19,6 +19,8 @@ require 'fluent/plugin/formatter'
19
19
  module Fluent
20
20
  module Plugin
21
21
  class TSVFormatter < Formatter
22
+ include Fluent::Plugin::Newline::Mixin
23
+
22
24
  Plugin.register_formatter('tsv', self)
23
25
 
24
26
  desc 'Field names included in each lines'
@@ -30,7 +32,7 @@ module Fluent
30
32
 
31
33
  def format(tag, time, record)
32
34
  formatted = @keys.map{|k| record[k].to_s }.join(@delimiter)
33
- formatted << "\n".freeze if @add_newline
35
+ formatted << @newline.freeze if @add_newline
34
36
  formatted
35
37
  end
36
38
  end
@@ -14,126 +14,5 @@
14
14
  # limitations under the License.
15
15
  #
16
16
 
17
- require 'json'
18
-
19
- require 'fluent/plugin/input'
20
- require 'fluent/config/error'
21
-
22
- module Fluent::Plugin
23
- class DummyInput < Input
24
- Fluent::Plugin.register_input('dummy', self)
25
-
26
- helpers :thread, :storage
27
-
28
- BIN_NUM = 10
29
- DEFAULT_STORAGE_TYPE = 'local'
30
-
31
- desc "The value is the tag assigned to the generated events."
32
- config_param :tag, :string
33
- desc "The number of events in event stream of each emits."
34
- config_param :size, :integer, default: 1
35
- desc "It configures how many events to generate per second."
36
- config_param :rate, :integer, default: 1
37
- desc "If specified, each generated event has an auto-incremented key field."
38
- config_param :auto_increment_key, :string, default: nil
39
- desc "The boolean to suspend-and-resume incremental value after restart"
40
- config_param :suspend, :bool, default: false,deprecated: 'This parameters is ignored'
41
- desc "The dummy data to be generated. An array of JSON hashes or a single JSON hash."
42
- config_param :dummy, default: [{"message"=>"dummy"}] do |val|
43
- begin
44
- parsed = JSON.parse(val)
45
- rescue JSON::ParserError => ex
46
- # Fluent::ConfigParseError, "got incomplete JSON" will be raised
47
- # at literal_parser.rb with --use-v1-config, but I had to
48
- # take care at here for the case of --use-v0-config.
49
- raise Fluent::ConfigError, "#{ex.class}: #{ex.message}"
50
- end
51
- dummy = parsed.is_a?(Array) ? parsed : [parsed]
52
- dummy.each_with_index do |e, i|
53
- raise Fluent::ConfigError, "#{i}th element of dummy, #{e}, is not a hash" unless e.is_a?(Hash)
54
- end
55
- dummy
56
- end
57
-
58
- def initialize
59
- super
60
- @storage = nil
61
- end
62
-
63
- def configure(conf)
64
- super
65
- @dummy_index = 0
66
- config = conf.elements.select{|e| e.name == 'storage' }.first
67
- @storage = storage_create(usage: 'suspend', conf: config, default_type: DEFAULT_STORAGE_TYPE)
68
- end
69
-
70
- def multi_workers_ready?
71
- true
72
- end
73
-
74
- def start
75
- super
76
-
77
- @storage.put(:increment_value, 0) unless @storage.get(:increment_value)
78
- @storage.put(:dummy_index, 0) unless @storage.get(:dummy_index)
79
-
80
- if @auto_increment_key && !@storage.get(:auto_increment_value)
81
- @storage.put(:auto_increment_value, -1)
82
- end
83
-
84
- thread_create(:dummy_input, &method(:run))
85
- end
86
-
87
- def run
88
- batch_num = (@rate / BIN_NUM).to_i
89
- residual_num = (@rate % BIN_NUM)
90
- while thread_current_running?
91
- current_time = Time.now.to_i
92
- BIN_NUM.times do
93
- break unless (thread_current_running? && Time.now.to_i <= current_time)
94
- wait(0.1) { emit(batch_num) }
95
- end
96
- emit(residual_num) if thread_current_running?
97
- # wait for next second
98
- while thread_current_running? && Time.now.to_i <= current_time
99
- sleep 0.01
100
- end
101
- end
102
- end
103
-
104
- def emit(num)
105
- begin
106
- if @size > 1
107
- num.times do
108
- router.emit_array(@tag, Array.new(@size) { [Fluent::EventTime.now, generate] })
109
- end
110
- else
111
- num.times { router.emit(@tag, Fluent::EventTime.now, generate) }
112
- end
113
- rescue => _
114
- # ignore all errors not to stop emits by emit errors
115
- end
116
- end
117
-
118
- def generate
119
- d = @dummy[@dummy_index]
120
- unless d
121
- @dummy_index = 0
122
- d = @dummy[@dummy_index]
123
- end
124
- @dummy_index += 1
125
- if @auto_increment_key
126
- d = d.dup
127
- d[@auto_increment_key] = @storage.update(:auto_increment_value){|v| v + 1 }
128
- end
129
- d
130
- end
131
-
132
- def wait(time)
133
- start_time = Time.now
134
- yield
135
- sleep_time = time - (Time.now - start_time)
136
- sleep sleep_time if sleep_time > 0
137
- end
138
- end
139
- end
17
+ # Remove this file in fluentd v2
18
+ require_relative 'in_sample'