fluentd 0.14.1 → 0.14.2

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (129) hide show
  1. checksums.yaml +4 -4
  2. data/ChangeLog +110 -1
  3. data/Rakefile +5 -1
  4. data/appveyor.yml +7 -1
  5. data/example/in_forward.conf +4 -0
  6. data/lib/fluent/compat/exec_util.rb +129 -0
  7. data/lib/fluent/compat/file_util.rb +54 -0
  8. data/lib/fluent/compat/filter.rb +21 -3
  9. data/lib/fluent/compat/formatter.rb +4 -2
  10. data/lib/fluent/compat/formatter_utils.rb +85 -0
  11. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +60 -0
  12. data/lib/fluent/compat/input.rb +1 -3
  13. data/lib/fluent/compat/output.rb +95 -39
  14. data/lib/fluent/compat/parser.rb +17 -0
  15. data/lib/fluent/compat/parser_utils.rb +40 -0
  16. data/lib/fluent/compat/socket_util.rb +165 -0
  17. data/lib/fluent/compat/string_util.rb +34 -0
  18. data/lib/fluent/{test/driver/owner.rb → compat/structured_format_mixin.rb} +5 -11
  19. data/lib/fluent/config/element.rb +2 -2
  20. data/lib/fluent/configurable.rb +2 -1
  21. data/lib/fluent/event.rb +61 -7
  22. data/lib/fluent/event_router.rb +1 -1
  23. data/lib/fluent/plugin.rb +7 -7
  24. data/lib/fluent/plugin/buf_file.rb +5 -2
  25. data/lib/fluent/plugin/buffer.rb +194 -64
  26. data/lib/fluent/plugin/buffer/chunk.rb +28 -3
  27. data/lib/fluent/plugin/buffer/file_chunk.rb +5 -21
  28. data/lib/fluent/plugin/buffer/memory_chunk.rb +1 -11
  29. data/lib/fluent/plugin/exec_util.rb +2 -112
  30. data/lib/fluent/plugin/file_util.rb +3 -38
  31. data/lib/fluent/plugin/file_wrapper.rb +1 -1
  32. data/lib/fluent/plugin/filter_grep.rb +3 -7
  33. data/lib/fluent/plugin/filter_record_transformer.rb +5 -5
  34. data/lib/fluent/plugin/filter_stdout.rb +18 -11
  35. data/lib/fluent/plugin/formatter.rb +0 -48
  36. data/lib/fluent/plugin/formatter_csv.rb +7 -8
  37. data/lib/fluent/plugin/formatter_hash.rb +1 -4
  38. data/lib/fluent/plugin/formatter_json.rb +1 -4
  39. data/lib/fluent/plugin/formatter_ltsv.rb +5 -6
  40. data/lib/fluent/plugin/formatter_msgpack.rb +1 -4
  41. data/lib/fluent/plugin/formatter_out_file.rb +36 -3
  42. data/lib/fluent/plugin/formatter_stdout.rb +36 -1
  43. data/lib/fluent/plugin/in_dummy.rb +9 -2
  44. data/lib/fluent/plugin/in_exec.rb +20 -57
  45. data/lib/fluent/plugin/in_forward.rb +4 -3
  46. data/lib/fluent/plugin/in_object_space.rb +8 -44
  47. data/lib/fluent/plugin/in_syslog.rb +13 -24
  48. data/lib/fluent/plugin/in_tail.rb +3 -0
  49. data/lib/fluent/plugin/out_buffered_stdout.rb +14 -4
  50. data/lib/fluent/plugin/out_exec.rb +7 -5
  51. data/lib/fluent/plugin/out_exec_filter.rb +10 -10
  52. data/lib/fluent/plugin/out_file.rb +1 -3
  53. data/lib/fluent/plugin/out_forward.rb +38 -57
  54. data/lib/fluent/plugin/out_stdout.rb +14 -5
  55. data/lib/fluent/plugin/out_stream.rb +3 -0
  56. data/lib/fluent/plugin/output.rb +31 -14
  57. data/lib/fluent/plugin/parser.rb +0 -69
  58. data/lib/fluent/plugin/parser_apache.rb +10 -6
  59. data/lib/fluent/plugin/parser_apache_error.rb +8 -3
  60. data/lib/fluent/plugin/parser_csv.rb +3 -1
  61. data/lib/fluent/plugin/parser_json.rb +1 -1
  62. data/lib/fluent/plugin/parser_multiline.rb +5 -3
  63. data/lib/fluent/plugin/parser_nginx.rb +10 -6
  64. data/lib/fluent/plugin/parser_regexp.rb +73 -0
  65. data/lib/fluent/plugin/socket_util.rb +2 -148
  66. data/lib/fluent/plugin/storage_local.rb +1 -1
  67. data/lib/fluent/plugin/string_util.rb +3 -18
  68. data/lib/fluent/plugin_helper.rb +1 -0
  69. data/lib/fluent/plugin_helper/compat_parameters.rb +166 -41
  70. data/lib/fluent/plugin_helper/formatter.rb +30 -19
  71. data/lib/fluent/plugin_helper/inject.rb +25 -12
  72. data/lib/fluent/plugin_helper/parser.rb +22 -13
  73. data/lib/fluent/plugin_helper/storage.rb +22 -13
  74. data/lib/fluent/registry.rb +19 -6
  75. data/lib/fluent/supervisor.rb +27 -1
  76. data/lib/fluent/test/driver/base.rb +16 -92
  77. data/lib/fluent/test/driver/base_owned.rb +17 -53
  78. data/lib/fluent/test/driver/base_owner.rb +125 -0
  79. data/lib/fluent/test/driver/filter.rb +24 -2
  80. data/lib/fluent/test/driver/input.rb +2 -2
  81. data/lib/fluent/test/driver/multi_output.rb +2 -2
  82. data/lib/fluent/test/driver/output.rb +3 -5
  83. data/lib/fluent/test/helpers.rb +25 -0
  84. data/lib/fluent/test/input_test.rb +4 -4
  85. data/lib/fluent/test/output_test.rb +3 -3
  86. data/lib/fluent/version.rb +1 -1
  87. data/test/config/test_element.rb +135 -6
  88. data/test/plugin/test_buf_file.rb +71 -3
  89. data/test/plugin/test_buffer.rb +305 -86
  90. data/test/plugin/test_buffer_chunk.rb +60 -2
  91. data/test/plugin/test_buffer_file_chunk.rb +4 -3
  92. data/test/plugin/test_filter_grep.rb +25 -21
  93. data/test/plugin/test_filter_record_transformer.rb +75 -67
  94. data/test/plugin/test_filter_stdout.rb +171 -74
  95. data/test/plugin/test_formatter_csv.rb +94 -0
  96. data/test/plugin/test_formatter_json.rb +30 -0
  97. data/test/plugin/test_formatter_ltsv.rb +52 -0
  98. data/test/plugin/test_formatter_msgpack.rb +28 -0
  99. data/test/plugin/test_formatter_out_file.rb +95 -0
  100. data/test/plugin/test_formatter_single_value.rb +38 -0
  101. data/test/plugin/test_in_dummy.rb +95 -0
  102. data/test/plugin/test_in_exec.rb +27 -31
  103. data/test/plugin/test_in_forward.rb +24 -0
  104. data/test/plugin/test_in_gc_stat.rb +5 -5
  105. data/test/plugin/test_in_object_space.rb +4 -4
  106. data/test/plugin/test_in_syslog.rb +60 -35
  107. data/test/plugin/test_out_buffered_stdout.rb +17 -3
  108. data/test/plugin/test_out_forward.rb +93 -5
  109. data/test/plugin/test_out_stdout.rb +14 -3
  110. data/test/plugin/test_output_as_buffered_retries.rb +20 -0
  111. data/test/plugin/test_output_as_buffered_secondary.rb +16 -0
  112. data/test/plugin/test_output_as_standard.rb +22 -22
  113. data/test/plugin/test_parser_apache.rb +13 -9
  114. data/test/plugin/test_parser_apache_error.rb +11 -6
  115. data/test/plugin/test_parser_csv.rb +35 -25
  116. data/test/plugin/test_parser_nginx.rb +11 -5
  117. data/test/plugin/test_parser_regexp.rb +235 -68
  118. data/test/plugin/test_parser_tsv.rb +54 -58
  119. data/test/plugin_helper/test_compat_parameters.rb +111 -46
  120. data/test/plugin_helper/test_formatter.rb +40 -0
  121. data/test/plugin_helper/test_inject.rb +101 -2
  122. data/test/plugin_helper/test_parser.rb +40 -0
  123. data/test/plugin_helper/test_storage.rb +43 -0
  124. data/test/test_event.rb +93 -0
  125. data/test/test_event_router.rb +13 -4
  126. data/test/test_event_time.rb +0 -3
  127. data/test/test_formatter.rb +7 -164
  128. data/test/test_plugin_classes.rb +28 -1
  129. metadata +24 -3
@@ -17,13 +17,15 @@
17
17
  require 'cool.io'
18
18
  require 'yajl'
19
19
 
20
- require 'fluent/input'
20
+ require 'fluent/plugin/input'
21
21
  require 'fluent/config/error'
22
- require 'fluent/parser'
22
+ require 'fluent/plugin/parser'
23
23
 
24
- module Fluent
24
+ module Fluent::Plugin
25
25
  class SyslogInput < Input
26
- Plugin.register_input('syslog', self)
26
+ Fluent::Plugin.register_input('syslog', self)
27
+
28
+ helpers :parser, :event_loop
27
29
 
28
30
  SYSLOG_REGEXP = /^\<([0-9]+)\>(.*)/
29
31
 
@@ -92,6 +94,7 @@ module Fluent
92
94
  desc 'Specify key of source host when include_source_host is true.'
93
95
  config_param :source_host_key, :string, default: 'source_host'.freeze
94
96
  config_param :blocking_timeout, :time, default: 0.5
97
+ config_param :message_length_limit, :size, default: 2048
95
98
 
96
99
  def configure(conf)
97
100
  super
@@ -99,14 +102,13 @@ module Fluent
99
102
  @use_default = false
100
103
 
101
104
  if conf.has_key?('format')
102
- @parser = Plugin.new_parser(conf['format'])
103
- @parser.configure(conf)
105
+ @parser = parser_create(usage: 'syslog_input', type: conf['format'], conf: conf)
104
106
  else
105
107
  conf['with_priority'] = true
106
- @parser = TextParser::SyslogParser.new
107
- @parser.configure(conf)
108
+ @parser = parser_create(usage: 'syslog_input', type: 'syslog', conf: conf)
108
109
  @use_default = true
109
110
  end
111
+ @_event_loop_run_timeout = @blocking_timeout
110
112
  end
111
113
 
112
114
  def start
@@ -118,29 +120,16 @@ module Fluent
118
120
  method(:receive_data_parser)
119
121
  end
120
122
 
121
- @loop = Coolio::Loop.new
122
123
  @handler = listen(callback)
123
- @loop.attach(@handler)
124
-
125
- @thread = Thread.new(&method(:run))
124
+ event_loop_attach(@handler)
126
125
  end
127
126
 
128
127
  def shutdown
129
- @loop.watchers.each {|w| w.detach }
130
- @loop.stop
131
128
  @handler.close
132
- @thread.join
133
129
 
134
130
  super
135
131
  end
136
132
 
137
- def run
138
- @loop.run(@blocking_timeout)
139
- rescue
140
- log.error "unexpected error", error: $!.to_s
141
- log.error_backtrace
142
- end
143
-
144
133
  private
145
134
 
146
135
  def receive_data_parser(data, addr)
@@ -193,11 +182,11 @@ module Fluent
193
182
  client = ServerEngine::SocketManager::Client.new(socket_manager_path)
194
183
  if @protocol_type == :udp
195
184
  @usock = client.listen_udp(@bind, @port)
196
- SocketUtil::UdpHandler.new(@usock, log, 2048, callback)
185
+ Fluent::SocketUtil::UdpHandler.new(@usock, log, @message_length_limit, callback)
197
186
  else
198
187
  # syslog family add "\n" to each message and this seems only way to split messages in tcp stream
199
188
  lsock = client.listen_tcp(@bind, @port)
200
- Coolio::TCPServer.new(lsock, nil, SocketUtil::TcpHandler, log, "\n", callback)
189
+ Coolio::TCPServer.new(lsock, nil, Fluent::SocketUtil::TcpHandler, log, "\n", callback)
201
190
  end
202
191
  end
203
192
 
@@ -56,6 +56,9 @@ module Fluent
56
56
  config_param :pos_file, :string, default: nil
57
57
  desc 'Start to read the logs from the head of file, not bottom.'
58
58
  config_param :read_from_head, :bool, default: false
59
+ # When the program deletes log file and re-creates log file with same filename after passed refresh_interval,
60
+ # in_tail may raise a pos_file related error. This is a known issue but there is no such program in production.
61
+ # If we find such program / application, we will fix the problem.
59
62
  desc 'The interval of refreshing the list of watch file.'
60
63
  config_param :refresh_interval, :time, default: 60
61
64
  desc 'The number of reading lines at each IO.'
@@ -20,14 +20,20 @@ module Fluent::Plugin
20
20
  class BufferedStdoutOutput < Output
21
21
  Fluent::Plugin.register_output('buffered_stdout', self)
22
22
 
23
- desc 'Output format.(json,hash)'
24
- config_param :output_type, default: 'json'
23
+ helpers :formatter, :inject, :compat_parameters
24
+
25
25
  config_section :buffer do
26
26
  config_set_default :chunk_keys, ['tag']
27
27
  config_set_default :flush_at_shutdown, true
28
28
  config_set_default :chunk_limit_size, 10 * 1024
29
29
  end
30
30
 
31
+ DEFAULT_FORMAT_TYPE = 'json'
32
+
33
+ config_section :format do
34
+ config_set_default :@type, DEFAULT_FORMAT_TYPE
35
+ end
36
+
31
37
  attr_accessor :delayed
32
38
 
33
39
  def initialize
@@ -40,9 +46,12 @@ module Fluent::Plugin
40
46
  end
41
47
 
42
48
  def configure(conf)
49
+ if conf['output_type'] && !conf['format']
50
+ conf['format'] = conf['output_type']
51
+ end
52
+ compat_parameters_convert(conf, :inject, :formatter)
43
53
  super
44
- @formatter = Fluent::Plugin.new_formatter(@output_type, parent: self)
45
- @formatter.configure(conf)
54
+ @formatter = formatter_create(conf: conf.elements('format').first, default_type: DEFAULT_FORMAT_TYPE)
46
55
  end
47
56
 
48
57
  def write(chunk)
@@ -54,6 +63,7 @@ module Fluent::Plugin
54
63
  end
55
64
 
56
65
  def format(tag, time, record)
66
+ record = inject_values_to_record(tag, time, record)
57
67
  "#{Time.at(time).localtime} #{tag}: #{@formatter.format(tag, time, record).chomp}\n"
58
68
  end
59
69
  end
@@ -39,9 +39,9 @@ module Fluent::Plugin
39
39
  config_param :time_key, :string, default: nil
40
40
  desc 'The format for event time used when the time_key parameter is specified. The default is UNIX time (integer).'
41
41
  config_param :time_format, :string, default: nil
42
- desc "The format used to map the incoming events to the program input. (#{ExecUtil::SUPPORTED_FORMAT.keys.join(',')})"
42
+ desc "The format used to map the incoming events to the program input. (#{Fluent::ExecUtil::SUPPORTED_FORMAT.keys.join(',')})"
43
43
  config_param :format, default: :tsv, skip_accessor: true do |val|
44
- f = ExecUtil::SUPPORTED_FORMAT[val]
44
+ f = Fluent::ExecUtil::SUPPORTED_FORMAT[val]
45
45
  raise ConfigError, "Unsupported format '#{val}'" unless f
46
46
  f
47
47
  end
@@ -53,6 +53,8 @@ module Fluent::Plugin
53
53
  end
54
54
 
55
55
  def configure(conf)
56
+ compat_parameters_convert(conf, :buffer, default_chunk_key: 'time')
57
+
56
58
  super
57
59
 
58
60
  @formatter = case @format
@@ -60,11 +62,11 @@ module Fluent::Plugin
60
62
  if @keys.empty?
61
63
  raise Fluent::ConfigError, "keys option is required on exec output for tsv format"
62
64
  end
63
- ExecUtil::TSVFormatter.new(@keys)
65
+ Fluent::ExecUtil::TSVFormatter.new(@keys)
64
66
  when :json
65
- ExecUtil::JSONFormatter.new
67
+ Fluent::ExecUtil::JSONFormatter.new
66
68
  when :msgpack
67
- ExecUtil::MessagePackFormatter.new
69
+ Fluent::ExecUtil::MessagePackFormatter.new
68
70
  end
69
71
 
70
72
  if @time_key
@@ -38,9 +38,9 @@ module Fluent
38
38
  config_param :remove_prefix, :string, default: nil
39
39
  config_param :add_prefix, :string, default: nil
40
40
 
41
- desc "The format used to map the incoming event to the program input.(#{ExecUtil::SUPPORTED_FORMAT.keys.join(',')})"
41
+ desc "The format used to map the incoming event to the program input.(#{Fluent::ExecUtil::SUPPORTED_FORMAT.keys.join(',')})"
42
42
  config_param :in_format, default: :tsv do |val|
43
- f = ExecUtil::SUPPORTED_FORMAT[val]
43
+ f = Fluent::ExecUtil::SUPPORTED_FORMAT[val]
44
44
  raise ConfigError, "Unsupported in_format '#{val}'" unless f
45
45
  f
46
46
  end
@@ -55,9 +55,9 @@ module Fluent
55
55
  desc 'The format for event time used when the in_time_key parameter is specified.(Defauls is UNIX time)'
56
56
  config_param :in_time_format, default: nil
57
57
 
58
- desc "The format used to process the program output.(#{ExecUtil::SUPPORTED_FORMAT.keys.join(',')})"
58
+ desc "The format used to process the program output.(#{Fluent::ExecUtil::SUPPORTED_FORMAT.keys.join(',')})"
59
59
  config_param :out_format, default: :tsv do |val|
60
- f = ExecUtil::SUPPORTED_FORMAT[val]
60
+ f = Fluent::ExecUtil::SUPPORTED_FORMAT[val]
61
61
  raise ConfigError, "Unsupported out_format '#{val}'" unless f
62
62
  f
63
63
  end
@@ -169,11 +169,11 @@ module Fluent
169
169
  if @in_keys.empty?
170
170
  raise ConfigError, "in_keys option is required on exec_filter output for tsv in_format"
171
171
  end
172
- @formatter = ExecUtil::TSVFormatter.new(@in_keys)
172
+ @formatter = Fluent::ExecUtil::TSVFormatter.new(@in_keys)
173
173
  when :json
174
- @formatter = ExecUtil::JSONFormatter.new
174
+ @formatter = Fluent::ExecUtil::JSONFormatter.new
175
175
  when :msgpack
176
- @formatter = ExecUtil::MessagePackFormatter.new
176
+ @formatter = Fluent::ExecUtil::MessagePackFormatter.new
177
177
  end
178
178
 
179
179
  case @out_format
@@ -181,11 +181,11 @@ module Fluent
181
181
  if @out_keys.empty?
182
182
  raise ConfigError, "out_keys option is required on exec_filter output for tsv in_format"
183
183
  end
184
- @parser = ExecUtil::TSVParser.new(@out_keys, method(:on_message))
184
+ @parser = Fluent::ExecUtil::TSVParser.new(@out_keys, method(:on_message))
185
185
  when :json
186
- @parser = ExecUtil::JSONParser.new(method(:on_message))
186
+ @parser = Fluent::ExecUtil::JSONParser.new(method(:on_message))
187
187
  when :msgpack
188
- @parser = ExecUtil::MessagePackParser.new(method(:on_message))
188
+ @parser = Fluent::ExecUtil::MessagePackParser.new(method(:on_message))
189
189
  end
190
190
 
191
191
  @respawns = if @child_respawn.nil? or @child_respawn == 'none' or @child_respawn == '0'
@@ -103,9 +103,7 @@ module Fluent
103
103
  @formatter.configure(conf)
104
104
 
105
105
  if @symlink_path && @buffer.respond_to?(:path)
106
- (class << @buffer; self; end).module_eval do
107
- prepend SymlinkBufferMixin
108
- end
106
+ @buffer.extend SymlinkBufferMixin
109
107
  @buffer.symlink_path = @symlink_path
110
108
  end
111
109
 
@@ -78,8 +78,6 @@ module Fluent
78
78
  desc 'Use the "Phi accrual failure detector" to detect server failure.'
79
79
  config_param :phi_failure_detector, :bool, default: true
80
80
 
81
- # if any options added that requires extended forward api, fix @extend_internal_protocol
82
-
83
81
  desc 'Change the protocol to at-least-once.'
84
82
  config_param :require_ack_response, :bool, default: false # require in_forward to respond with ack
85
83
  desc 'This option is used when require_ack_response is true.'
@@ -95,8 +93,6 @@ module Fluent
95
93
  config_param :port, :integer, default: LISTEN_PORT
96
94
  config_param :host, :string, default: nil
97
95
 
98
- attr_accessor :extend_internal_protocol
99
-
100
96
  def configure(conf)
101
97
  super
102
98
 
@@ -112,13 +108,6 @@ module Fluent
112
108
 
113
109
  recover_sample_size = @recover_wait / @heartbeat_interval
114
110
 
115
- # add options here if any options addes which uses extended protocol
116
- @extend_internal_protocol = if @require_ack_response
117
- true
118
- else
119
- false
120
- end
121
-
122
111
  if @dns_round_robin
123
112
  if @heartbeat_type == :udp
124
113
  raise ConfigError, "forward output heartbeat type must be 'tcp' or 'none' to use dns_round_robin option"
@@ -276,16 +265,10 @@ module Fluent
276
265
  @weight_array = weight_array
277
266
  end
278
267
 
279
- # MessagePack FixArray length = 3 (if @extend_internal_protocol)
280
- # = 2 (else)
281
- FORWARD_HEADER = [0x92].pack('C').freeze
282
- FORWARD_HEADER_EXT = [0x93].pack('C').freeze
268
+ # MessagePack FixArray length is 3
269
+ FORWARD_HEADER = [0x93].pack('C').freeze
283
270
  def forward_header
284
- if @extend_internal_protocol
285
- FORWARD_HEADER_EXT
286
- else
287
- FORWARD_HEADER
288
- end
271
+ FORWARD_HEADER
289
272
  end
290
273
 
291
274
  #FORWARD_TCP_HEARTBEAT_DATA = FORWARD_HEADER + ''.to_msgpack + [].to_msgpack
@@ -314,7 +297,7 @@ module Fluent
314
297
  opt = [@send_timeout.to_i, 0].pack('L!L!') # struct timeval
315
298
  sock.setsockopt(Socket::SOL_SOCKET, Socket::SO_SNDTIMEO, opt)
316
299
 
317
- # beginArray(2)
300
+ # beginArray(3)
318
301
  sock.write forward_header
319
302
 
320
303
  # writeRaw(tag)
@@ -336,48 +319,46 @@ module Fluent
336
319
  # writeRawBody(packed_es)
337
320
  chunk.write_to(sock)
338
321
 
339
- if @extend_internal_protocol
340
- option = {}
341
- option['chunk'] = Base64.encode64(chunk.unique_id) if @require_ack_response
342
- sock.write option.to_msgpack
343
-
344
- if @require_ack_response && @ack_response_timeout > 0
345
- # Waiting for a response here results in a decrease of throughput because a chunk queue is locked.
346
- # To avoid a decrease of troughput, it is necessary to prepare a list of chunks that wait for responses
347
- # and process them asynchronously.
348
- if IO.select([sock], nil, nil, @ack_response_timeout)
349
- raw_data = sock.recv(1024)
350
-
351
- # When connection is closed by remote host, socket is ready to read and #recv returns an empty string that means EOF.
352
- # If this happens we assume the data wasn't delivered and retry it.
353
- if raw_data.empty?
354
- @log.warn "node #{node.host}:#{node.port} closed the connection. regard it as unavailable."
355
- node.disable!
356
- raise ForwardOutputConnectionClosedError, "node #{node.host}:#{node.port} closed connection"
357
- else
358
- # Serialization type of the response is same as sent data.
359
- res = MessagePack.unpack(raw_data)
360
-
361
- if res['ack'] != option['chunk']
362
- # Some errors may have occured when ack and chunk id is different, so send the chunk again.
363
- raise ForwardOutputResponseError, "ack in response and chunk id in sent data are different"
364
- end
365
- end
366
-
367
- else
368
- # IO.select returns nil on timeout.
369
- # There are 2 types of cases when no response has been received:
370
- # (1) the node does not support sending responses
371
- # (2) the node does support sending response but responses have not arrived for some reasons.
372
- @log.warn "no response from #{node.host}:#{node.port}. regard it as unavailable."
322
+ option = { 'size' => chunk.size_of_events }
323
+ option['chunk'] = Base64.encode64(chunk.unique_id) if @require_ack_response
324
+ sock.write option.to_msgpack
325
+
326
+ if @require_ack_response && @ack_response_timeout > 0
327
+ # Waiting for a response here results in a decrease of throughput because a chunk queue is locked.
328
+ # To avoid a decrease of throughput, it is necessary to prepare a list of chunks that wait for responses
329
+ # and process them asynchronously.
330
+ if IO.select([sock], nil, nil, @ack_response_timeout)
331
+ raw_data = sock.recv(1024)
332
+
333
+ # When connection is closed by remote host, socket is ready to read and #recv returns an empty string that means EOF.
334
+ # If this happens we assume the data wasn't delivered and retry it.
335
+ if raw_data.empty?
336
+ @log.warn "node #{node.host}:#{node.port} closed the connection. regard it as unavailable."
373
337
  node.disable!
374
- raise ForwardOutputACKTimeoutError, "node #{node.host}:#{node.port} does not return ACK"
338
+ raise ForwardOutputConnectionClosedError, "node #{node.host}:#{node.port} closed connection"
339
+ else
340
+ # Serialization type of the response is same as sent data.
341
+ res = MessagePack.unpack(raw_data)
342
+
343
+ if res['ack'] != option['chunk']
344
+ # Some errors may have occured when ack and chunk id is different, so send the chunk again.
345
+ raise ForwardOutputResponseError, "ack in response and chunk id in sent data are different"
346
+ end
375
347
  end
348
+
349
+ else
350
+ # IO.select returns nil on timeout.
351
+ # There are 2 types of cases when no response has been received:
352
+ # (1) the node does not support sending responses
353
+ # (2) the node does support sending response but responses have not arrived for some reasons.
354
+ @log.warn "no response from #{node.host}:#{node.port}. regard it as unavailable."
355
+ node.disable!
356
+ raise ForwardOutputACKTimeoutError, "node #{node.host}:#{node.port} does not return ACK"
376
357
  end
377
358
  end
378
359
 
379
360
  node.heartbeat(false)
380
- return res # for test
361
+ res # for test
381
362
  ensure
382
363
  sock.close_write
383
364
  sock.close
@@ -20,18 +20,27 @@ module Fluent::Plugin
20
20
  class StdoutOutput < Output
21
21
  Fluent::Plugin.register_output('stdout', self)
22
22
 
23
- desc 'Output format.(json,hash)'
24
- config_param :output_type, default: 'json'
23
+ helpers :inject, :formatter, :compat_parameters
24
+
25
+ DEFAULT_FORMAT_TYPE = 'json'
26
+
27
+ config_section :format do
28
+ config_set_default :@type, DEFAULT_FORMAT_TYPE
29
+ end
25
30
 
26
31
  def configure(conf)
32
+ if conf['output_type'] && !conf['format']
33
+ conf['format'] = conf['output_type']
34
+ end
35
+ compat_parameters_convert(conf, :inject, :formatter)
27
36
  super
28
- @formatter = Fluent::Plugin.new_formatter(@output_type, parent: self)
29
- @formatter.configure(conf)
37
+ @formatter = formatter_create(conf: conf.elements('format').first, default_type: DEFAULT_FORMAT_TYPE)
30
38
  end
31
39
 
32
40
  def process(tag, es)
33
41
  es.each {|time,record|
34
- $log.write "#{Time.at(time).localtime} #{tag}: #{@formatter.format(tag, time, record).chomp}\n"
42
+ r = inject_values_to_record(tag, time, record)
43
+ $log.write "#{Time.at(time).localtime} #{tag}: #{@formatter.format(tag, time, r).chomp}\n"
35
44
  }
36
45
  $log.flush
37
46
  end
@@ -25,7 +25,10 @@ module Fluent
25
25
  class StreamOutput < BufferedOutput
26
26
  config_param :send_timeout, :time, default: 60
27
27
 
28
+ helpers :compat_parameters
29
+
28
30
  def configure(conf)
31
+ compat_parameters_convert(conf, :buffer)
29
32
  super
30
33
  end
31
34
 
@@ -627,6 +627,18 @@ module Fluent
627
627
  end
628
628
  end
629
629
 
630
+ FORMAT_MSGPACK_STREAM = ->(e){ e.to_msgpack_stream }
631
+ FORMAT_MSGPACK_STREAM_TIME_INT = ->(e){ e.to_msgpack_stream(time_int: true) }
632
+
633
+ # metadata_and_data is a Hash of:
634
+ # (standard format) metadata => event stream
635
+ # (custom format) metadata => array of formatted event
636
+ # For standard format, formatting should be done for whole event stream, but
637
+ # "whole event stream" may be a split of "es" here when it's bigger than chunk_limit_size.
638
+ # `@buffer.write` will do this splitting.
639
+ # For custom format, formatting will be done here. Custom formatting always requires
640
+ # iteration of event stream, and it should be done just once even if total event stream size
641
+ # is biggar than chunk_limit_size because of performance.
630
642
  def handle_stream_with_custom_format(tag, es, enqueue: false)
631
643
  meta_and_data = {}
632
644
  records = 0
@@ -637,13 +649,14 @@ module Fluent
637
649
  records += 1
638
650
  end
639
651
  write_guard do
640
- @buffer.write(meta_and_data, bulk: false, enqueue: enqueue)
652
+ @buffer.write(meta_and_data, enqueue: enqueue)
641
653
  end
642
654
  @counters_monitor.synchronize{ @emit_records += records }
643
655
  true
644
656
  end
645
657
 
646
658
  def handle_stream_with_standard_format(tag, es, enqueue: false)
659
+ format_proc = @time_as_integer ? FORMAT_MSGPACK_STREAM_TIME_INT : FORMAT_MSGPACK_STREAM
647
660
  meta_and_data = {}
648
661
  records = 0
649
662
  es.each do |time, record|
@@ -652,41 +665,37 @@ module Fluent
652
665
  meta_and_data[meta].add(time, record)
653
666
  records += 1
654
667
  end
655
- meta_and_data_bulk = {}
656
- meta_and_data.each_pair do |meta, m_es|
657
- meta_and_data_bulk[meta] = [m_es.to_msgpack_stream(time_int: @time_as_integer), m_es.size]
658
- end
659
668
  write_guard do
660
- @buffer.write(meta_and_data_bulk, bulk: true, enqueue: enqueue)
669
+ @buffer.write(meta_and_data, format: format_proc, enqueue: enqueue)
661
670
  end
662
671
  @counters_monitor.synchronize{ @emit_records += records }
663
672
  true
664
673
  end
665
674
 
666
675
  def handle_stream_simple(tag, es, enqueue: false)
676
+ format_proc = nil
667
677
  meta = metadata((@chunk_key_tag ? tag : nil), nil, nil)
668
678
  records = es.size
669
679
  if @custom_format
670
680
  records = 0
671
- es_size = 0
672
- es_bulk = ''
673
- es.each do |time,record|
674
- es_bulk << format(tag, time, record)
675
- es_size += 1
681
+ data = []
682
+ es.each do |time, record|
683
+ data << format(tag, time, record)
676
684
  records += 1
677
685
  end
678
686
  else
679
- es_size = es.size
680
- es_bulk = es.to_msgpack_stream(time_int: @time_as_integer)
687
+ format_proc = @time_as_integer ? FORMAT_MSGPACK_STREAM_TIME_INT : FORMAT_MSGPACK_STREAM
688
+ data = es
681
689
  end
682
690
  write_guard do
683
- @buffer.write({meta => [es_bulk, es_size]}, bulk: true, enqueue: enqueue)
691
+ @buffer.write({meta => data}, format: format_proc, enqueue: enqueue)
684
692
  end
685
693
  @counters_monitor.synchronize{ @emit_records += records }
686
694
  true
687
695
  end
688
696
 
689
697
  def commit_write(chunk_id, delayed: @delayed_commit, secondary: false)
698
+ log.trace "committing write operation to a chunk", chunk: dump_unique_id_hex(chunk_id), delayed: delayed
690
699
  if delayed
691
700
  @dequeued_chunks_mutex.synchronize do
692
701
  @dequeued_chunks.delete_if{ |info| info.chunk_id == chunk_id }
@@ -762,6 +771,8 @@ module Fluent
762
771
  chunk = @buffer.dequeue_chunk
763
772
  return unless chunk
764
773
 
774
+ log.debug "trying flush for a chunk", chunk: dump_unique_id_hex(chunk.unique_id)
775
+
765
776
  output = self
766
777
  using_secondary = false
767
778
  if @retry_mutex.synchronize{ @retry && @retry.secondary? }
@@ -775,6 +786,7 @@ module Fluent
775
786
 
776
787
  begin
777
788
  if output.delayed_commit
789
+ log.trace "executing delayed write and commit", chunk: dump_unique_id_hex(chunk.unique_id)
778
790
  @counters_monitor.synchronize{ @write_count += 1 }
779
791
  output.try_write(chunk)
780
792
  @dequeued_chunks_mutex.synchronize do
@@ -783,9 +795,14 @@ module Fluent
783
795
  end
784
796
  else # output plugin without delayed purge
785
797
  chunk_id = chunk.unique_id
798
+ dump_chunk_id = dump_unique_id_hex(chunk_id)
799
+ log.trace "adding write count", instance: self.object_id
786
800
  @counters_monitor.synchronize{ @write_count += 1 }
801
+ log.trace "executing sync write", chunk: dump_chunk_id
787
802
  output.write(chunk)
803
+ log.trace "write operation done, committing", chunk: dump_chunk_id
788
804
  commit_write(chunk_id, secondary: using_secondary)
805
+ log.trace "done to commit a chunk", chunk: dump_chunk_id
789
806
  end
790
807
  rescue => e
791
808
  log.debug "taking back chunk for errors.", plugin_id: plugin_id, chunk: dump_unique_id_hex(chunk.unique_id)