fluentd 0.14.11-x86-mingw32 → 0.14.12-x86-mingw32


Files changed (119)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -5
  3. data/ChangeLog +54 -2
  4. data/example/in_dummy_blocks.conf +17 -0
  5. data/example/in_forward_tls.conf +14 -0
  6. data/example/in_forward_workers.conf +21 -0
  7. data/example/logevents.conf +25 -0
  8. data/example/out_forward_heartbeat_none.conf +16 -0
  9. data/example/out_forward_tls.conf +18 -0
  10. data/example/suppress_config_dump.conf +7 -0
  11. data/lib/fluent/agent.rb +3 -32
  12. data/lib/fluent/clock.rb +62 -0
  13. data/lib/fluent/command/fluentd.rb +12 -0
  14. data/lib/fluent/compat/input.rb +10 -1
  15. data/lib/fluent/compat/output.rb +40 -1
  16. data/lib/fluent/config/configure_proxy.rb +30 -7
  17. data/lib/fluent/config/section.rb +4 -0
  18. data/lib/fluent/config/types.rb +2 -2
  19. data/lib/fluent/configurable.rb +31 -5
  20. data/lib/fluent/engine.rb +61 -12
  21. data/lib/fluent/event_router.rb +6 -0
  22. data/lib/fluent/load.rb +0 -1
  23. data/lib/fluent/log.rb +118 -42
  24. data/lib/fluent/match.rb +37 -0
  25. data/lib/fluent/plugin.rb +25 -3
  26. data/lib/fluent/plugin/base.rb +4 -0
  27. data/lib/fluent/plugin/buf_file.rb +38 -14
  28. data/lib/fluent/plugin/buffer.rb +20 -20
  29. data/lib/fluent/plugin/buffer/file_chunk.rb +2 -2
  30. data/lib/fluent/plugin/compressable.rb +1 -0
  31. data/lib/fluent/plugin/filter_record_transformer.rb +3 -6
  32. data/lib/fluent/plugin/formatter_csv.rb +4 -1
  33. data/lib/fluent/plugin/formatter_hash.rb +5 -1
  34. data/lib/fluent/plugin/formatter_json.rb +10 -0
  35. data/lib/fluent/plugin/formatter_ltsv.rb +2 -1
  36. data/lib/fluent/plugin/in_dummy.rb +4 -0
  37. data/lib/fluent/plugin/in_exec.rb +4 -0
  38. data/lib/fluent/plugin/in_forward.rb +11 -3
  39. data/lib/fluent/plugin/in_gc_stat.rb +4 -0
  40. data/lib/fluent/plugin/in_http.rb +4 -0
  41. data/lib/fluent/plugin/in_monitor_agent.rb +29 -2
  42. data/lib/fluent/plugin/in_object_space.rb +4 -1
  43. data/lib/fluent/plugin/in_syslog.rb +4 -0
  44. data/lib/fluent/plugin/in_tail.rb +193 -116
  45. data/lib/fluent/plugin/in_tcp.rb +5 -1
  46. data/lib/fluent/plugin/in_udp.rb +4 -0
  47. data/lib/fluent/plugin/input.rb +4 -0
  48. data/lib/fluent/plugin/out_copy.rb +4 -0
  49. data/lib/fluent/plugin/out_exec.rb +4 -0
  50. data/lib/fluent/plugin/out_exec_filter.rb +4 -0
  51. data/lib/fluent/plugin/out_file.rb +70 -30
  52. data/lib/fluent/plugin/out_forward.rb +132 -28
  53. data/lib/fluent/plugin/out_null.rb +10 -0
  54. data/lib/fluent/plugin/out_relabel.rb +4 -0
  55. data/lib/fluent/plugin/out_roundrobin.rb +4 -0
  56. data/lib/fluent/plugin/out_secondary_file.rb +5 -0
  57. data/lib/fluent/plugin/out_stdout.rb +5 -0
  58. data/lib/fluent/plugin/output.rb +18 -9
  59. data/lib/fluent/plugin/storage_local.rb +25 -2
  60. data/lib/fluent/plugin_helper/cert_option.rb +159 -0
  61. data/lib/fluent/plugin_helper/child_process.rb +6 -6
  62. data/lib/fluent/plugin_helper/compat_parameters.rb +1 -1
  63. data/lib/fluent/plugin_helper/event_loop.rb +29 -4
  64. data/lib/fluent/plugin_helper/inject.rb +14 -1
  65. data/lib/fluent/plugin_helper/server.rb +275 -31
  66. data/lib/fluent/plugin_helper/socket.rb +144 -4
  67. data/lib/fluent/plugin_helper/socket_option.rb +2 -17
  68. data/lib/fluent/plugin_helper/storage.rb +7 -1
  69. data/lib/fluent/plugin_helper/thread.rb +16 -4
  70. data/lib/fluent/registry.rb +26 -9
  71. data/lib/fluent/root_agent.rb +7 -3
  72. data/lib/fluent/supervisor.rb +37 -15
  73. data/lib/fluent/system_config.rb +37 -10
  74. data/lib/fluent/test.rb +2 -0
  75. data/lib/fluent/test/driver/base.rb +24 -26
  76. data/lib/fluent/test/helpers.rb +21 -0
  77. data/lib/fluent/version.rb +1 -1
  78. data/test/command/test_fluentd.rb +274 -4
  79. data/test/config/test_configurable.rb +154 -0
  80. data/test/config/test_configure_proxy.rb +180 -1
  81. data/test/config/test_system_config.rb +10 -0
  82. data/test/config/test_types.rb +1 -0
  83. data/test/plugin/test_base.rb +4 -0
  84. data/test/plugin/test_buf_file.rb +241 -9
  85. data/test/plugin/test_buffer.rb +11 -11
  86. data/test/plugin/test_buffer_file_chunk.rb +6 -6
  87. data/test/plugin/test_compressable.rb +3 -0
  88. data/test/plugin/test_filter.rb +4 -0
  89. data/test/plugin/test_filter_record_transformer.rb +20 -0
  90. data/test/plugin/test_formatter_csv.rb +9 -0
  91. data/test/plugin/test_formatter_hash.rb +35 -0
  92. data/test/plugin/test_formatter_json.rb +8 -0
  93. data/test/plugin/test_formatter_ltsv.rb +7 -0
  94. data/test/plugin/test_in_dummy.rb +7 -3
  95. data/test/plugin/test_in_monitor_agent.rb +43 -5
  96. data/test/plugin/test_in_tail.rb +97 -4
  97. data/test/plugin/test_input.rb +4 -0
  98. data/test/plugin/test_out_file.rb +46 -7
  99. data/test/plugin/test_out_forward.rb +59 -7
  100. data/test/plugin/test_output.rb +10 -4
  101. data/test/plugin/test_output_as_buffered.rb +37 -25
  102. data/test/plugin/test_output_as_buffered_compress.rb +1 -1
  103. data/test/plugin/test_output_as_buffered_retries.rb +6 -6
  104. data/test/plugin/test_output_as_buffered_secondary.rb +91 -31
  105. data/test/plugin/test_storage_local.rb +40 -1
  106. data/test/plugin_helper/test_child_process.rb +29 -28
  107. data/test/plugin_helper/test_compat_parameters.rb +1 -1
  108. data/test/plugin_helper/test_inject.rb +27 -9
  109. data/test/plugin_helper/test_server.rb +822 -50
  110. data/test/plugin_helper/test_storage.rb +11 -0
  111. data/test/plugin_helper/test_timer.rb +1 -0
  112. data/test/test_clock.rb +164 -0
  113. data/test/test_log.rb +146 -15
  114. data/test/test_plugin.rb +251 -0
  115. data/test/test_supervisor.rb +65 -57
  116. data/test/test_test_drivers.rb +2 -2
  117. metadata +18 -7
  118. data/lib/fluent/process.rb +0 -504
  119. data/test/test_process.rb +0 -48
data/lib/fluent/match.rb
@@ -138,4 +138,41 @@ module Fluent
       @patterns.any? {|pattern| pattern.match(str) }
     end
   end
+
+  class NoMatchMatch
+    def initialize(log)
+      @log = log
+      @count = 0
+      @warn_not_matched = true
+    end
+
+    def suppress_missing_match!
+      # for <label @FLUENT_LOG>
+      @warn_not_matched = false
+    end
+
+    def emit_events(tag, es)
+      return unless @warn_not_matched
+      # TODO use time instead of num of records
+      c = (@count += 1)
+      if c < 512
+        if Math.log(c) / Math.log(2) % 1.0 == 0
+          @log.warn "no patterns matched", tag: tag
+          return
+        end
+      else
+        if c % 512 == 0
+          @log.warn "no patterns matched", tag: tag
+          return
+        end
+      end
+      @log.on_trace { @log.trace "no patterns matched", tag: tag }
+    end
+
+    def start
+    end
+
+    def shutdown
+    end
+  end
 end
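
The suppress_missing_match! hook above exists so that fluentd's own log events, routed through the new <label @FLUENT_LOG> support in this release (see example/logevents.conf in the file list), do not themselves trigger "no patterns matched" warnings. A minimal sketch of such a label block, assuming the usual fluent.* tags for internal events; the shipped logevents.conf example may differ:

    # capture fluentd's internal log events so NoMatchMatch stays quiet about them
    <label @FLUENT_LOG>
      <match fluent.*>
        @type null
      </match>
    </label>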
data/lib/fluent/plugin.rb
@@ -113,12 +113,12 @@ module Fluent
     end
 
     def self.new_parser(type, parent: nil)
-      require 'fluent/parser'
-
       if type[0] == '/' && type[-1] == '/'
         # This usage is not recommended for new API... create RegexpParser directly
         require 'fluent/parser'
-        Fluent::TextParser.lookup(type)
+        impl = Fluent::TextParser.lookup(type)
+        impl.extend FeatureAvailabilityChecker
+        impl
       else
         new_impl('parser', PARSER_REGISTRY, type, parent)
       end
@@ -155,7 +155,29 @@ module Fluent
       if parent && impl.respond_to?("owner=")
         impl.owner = parent
       end
+      impl.extend FeatureAvailabilityChecker
       impl
     end
+
+    module FeatureAvailabilityChecker
+      def configure(conf)
+        super
+
+        # extend plugin instance by this module
+        # to run this check after all #configure methods of plugins and plugin helpers
+        sysconf = if self.respond_to?(:owner) && owner.respond_to?(:system_config)
+                    owner.system_config
+                  elsif self.respond_to?(:system_config)
+                    self.system_config
+                  else
+                    nil
+                  end
+
+        if sysconf && sysconf.workers > 1 && !self.multi_workers_ready?
+          type = Fluent::Plugin.lookup_type_from_class(self.class)
+          raise Fluent::ConfigError, "Plugin '#{type}' does not support multi workers configuration (#{self.class})"
+        end
+      end
+    end
   end
 end
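
FeatureAvailabilityChecker is what turns the new multi-process `workers` setting into a hard startup failure for any configured plugin whose multi_workers_ready? returns false (the default in base.rb below is true, and several plugins in this release override it). A hedged sketch of the configuration that enables the check; the value 2 is arbitrary:

    <system>
      # with workers > 1, a plugin that is not multi-worker ready now raises
      # Fluent::ConfigError at startup instead of misbehaving at runtime
      workers 2
    </system>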
data/lib/fluent/plugin/base.rb
@@ -57,6 +57,10 @@ module Fluent
         self
       end
 
+      def multi_workers_ready?
+        true
+      end
+
       def string_safe_encoding(str)
         unless str.valid_encoding?
           log.info "invalid byte sequence is replaced in `#{str}`" if self.respond_to?(:log)
data/lib/fluent/plugin/buf_file.rb
@@ -46,14 +46,20 @@ module Fluent
       def initialize
         super
         @symlink_path = nil
+        @multi_workers_available = false
+        @additional_resume_path = nil
       end
 
       def configure(conf)
         super
 
+        multi_workers_configured = owner.system_config.workers > 1 ? true : false
+
+        using_plugin_root_dir = false
         unless @path
           if root_dir = owner.plugin_root_dir
             @path = File.join(root_dir, 'buffer')
+            using_plugin_root_dir = true # plugin_root_dir path contains worker id
           else
             raise Fluent::ConfigError, "buffer path is not configured. specify 'path' in <buffer>"
           end
@@ -67,33 +67,47 @@ module Fluent
 
         @@buffer_paths[@path] = type_of_owner
 
-        if File.exist?(@path)
-          if File.directory?(@path)
+        specified_directory_exists = File.exist?(@path) && File.directory?(@path)
+        unexisting_path_for_directory = !File.exist?(@path) && !@path.include?('.*')
+
+        if specified_directory_exists || unexisting_path_for_directory # directory
+          if using_plugin_root_dir || !multi_workers_configured
             @path = File.join(@path, 'buffer.*.log')
-          elsif File.basename(@path).include?('.*.')
-            # valid path (buffer.*.log will be ignored)
-          elsif File.basename(@path).end_with?('.*')
-            @path = @path + '.log'
           else
-            # existing file will be ignored
-            @path = @path + '.*.log'
+            @path = File.join(@path, "worker#{fluentd_worker_id}", 'buffer.*.log')
+            if fluentd_worker_id == 0
+              # worker 0 always checks unflushed buffer chunks to be resumed (might be created while non-multi-worker configuration)
+              @additional_resume_path = File.join(File.expand_path("../../", @path), 'buffer.*.log')
+            end
           end
-        else # path doesn't exist
+          @multi_workers_available = true
+        else # specified path is file path
           if File.basename(@path).include?('.*.')
-            # valid path
+            # valid file path
           elsif File.basename(@path).end_with?('.*')
             @path = @path + '.log'
           else
-            # path is handled as directory, and it will be created at #start
-            @path = File.join(@path, 'buffer.*.log')
+            # existing file will be ignored
+            @path = @path + '.*.log'
           end
+          @multi_workers_available = false
         end
 
-        unless @dir_permission
+        if @dir_permission
+          @dir_permission = @dir_permission.to_i(8) if @dir_permission.is_a?(String)
+        else
           @dir_permission = system_config.dir_permission || DIR_PERMISSION
         end
       end
 
+      # This method is called only when multi worker is configured
+      def multi_workers_ready?
+        unless @multi_workers_available
+          log.error "file buffer with multi workers should be configured to use directory 'path', or system root_dir and plugin id"
+        end
+        @multi_workers_available
+      end
+
       def buffer_path_for_test?
         caller_locations.each do |location|
           # Thread::Backtrace::Location#path returns base filename or absolute path.
@@ -120,7 +140,11 @@ module Fluent
         stage = {}
         queue = []
 
-        Dir.glob(@path) do |path|
+        patterns = [@path]
+        patterns.unshift @additional_resume_path if @additional_resume_path
+        Dir.glob(patterns) do |path|
+          next unless File.file?(path)
+
           m = new_metadata() # this metadata will be overwritten by resuming .meta file content
           # so it should not added into @metadata_list for now
           mode = Fluent::Plugin::Buffer::FileChunk.assume_chunk_state(path)
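
Per the configure branch above, a file buffer running under more than one worker must be given a directory `path` (or rely on system `root_dir` plus a plugin `@id`, so plugin_root_dir is used); each worker then writes to its own worker<N>/buffer.*.log underneath, and worker 0 additionally resumes chunks left over from a single-worker run. A hedged configuration sketch with illustrative paths:

    <system>
      workers 2
    </system>

    # inside a buffered output's <match> section:
    <buffer>
      @type file
      path /var/log/fluent/app-buffer   # a directory; workers write .../worker0/, .../worker1/ beneath it
    </buffer>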
data/lib/fluent/plugin/buffer.rb
@@ -47,10 +47,10 @@ module Fluent
 
       # If user specify this value and (chunk_size * queue_length) is smaller than total_size,
       # then total_size is automatically configured to that value
-      config_param :queue_length_limit, :integer, default: nil
+      config_param :queue_limit_length, :integer, default: nil
 
       # optional new limitations
-      config_param :chunk_records_limit, :integer, default: nil
+      config_param :chunk_limit_records, :integer, default: nil
 
       # if chunk size (or records) is 95% or more after #write, then that chunk will be enqueued
       config_param :chunk_full_threshold, :float, default: DEFAULT_CHUNK_FULL_THRESHOLD
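
The renames above bring the parameter names into line with chunk_limit_size and total_limit_size; the hunks that follow update the instance variables, the configure logic, and the size checks to match. A hedged sketch of a <buffer> section using the new names; the values are arbitrary:

    <buffer>
      chunk_limit_size 8m
      chunk_limit_records 5000   # optional record-count cap per chunk
      queue_limit_length 64      # if set, total_limit_size becomes chunk_limit_size * 64
    </buffer>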
@@ -73,8 +73,8 @@ module Fluent
 
         @chunk_limit_size = nil
         @total_limit_size = nil
-        @queue_length_limit = nil
-        @chunk_records_limit = nil
+        @queue_limit_length = nil
+        @chunk_limit_records = nil
 
         @stage = {} #=> Hash (metadata -> chunk) : not flushed yet
         @queue = [] #=> Array (chunks) : already flushed (not written)
@@ -92,8 +92,8 @@ module Fluent
       def configure(conf)
         super
 
-        unless @queue_length_limit.nil?
-          @total_limit_size = @chunk_limit_size * @queue_length_limit
+        unless @queue_limit_length.nil?
+          @total_limit_size = @chunk_limit_size * @queue_limit_length
         end
       end
 
@@ -142,7 +142,7 @@ module Fluent
 
       ## TODO: for back pressure feature
       # def used?(ratio)
-      #   @total_size_limit * ratio > @stage_size + @queue_size
+      #   @total_limit_size * ratio > @stage_size + @queue_size
       # end
 
       def resume
@@ -312,7 +312,7 @@ module Fluent
       end
 
       def enqueue_chunk(metadata)
-        log.debug "enqueueing chunk", instance: self.object_id, metadata: metadata
+        log.trace "enqueueing chunk", instance: self.object_id, metadata: metadata
         synchronize do
           chunk = @stage.delete(metadata)
           return nil unless chunk
@@ -334,7 +334,7 @@ module Fluent
       end
 
       def enqueue_unstaged_chunk(chunk)
-        log.debug "enqueueing unstaged chunk", instance: self.object_id, metadata: chunk.metadata
+        log.trace "enqueueing unstaged chunk", instance: self.object_id, metadata: chunk.metadata
         synchronize do
           chunk.synchronize do
             metadata = chunk.metadata
@@ -347,7 +347,7 @@ module Fluent
       end
 
       def enqueue_all
-        log.debug "enqueueing all chunks in buffer", instance: self.object_id
+        log.trace "enqueueing all chunks in buffer", instance: self.object_id
         synchronize do
           if block_given?
             @stage.keys.each do |metadata|
@@ -365,7 +365,7 @@ module Fluent
 
       def dequeue_chunk
         return nil if @queue.empty?
-        log.debug "dequeueing a chunk", instance: self.object_id
+        log.trace "dequeueing a chunk", instance: self.object_id
         synchronize do
           chunk = @queue.shift
 
@@ -374,18 +374,18 @@ module Fluent
 
           @dequeued[chunk.unique_id] = chunk
           @queued_num[chunk.metadata] -= 1 # BUG if nil, 0 or subzero
-          log.debug "chunk dequeued", instance: self.object_id, metadata: chunk.metadata
+          log.trace "chunk dequeued", instance: self.object_id, metadata: chunk.metadata
           chunk
         end
       end
 
       def takeback_chunk(chunk_id)
-        log.debug "taking back a chunk", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id)
+        log.trace "taking back a chunk", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id)
         synchronize do
           chunk = @dequeued.delete(chunk_id)
           return false unless chunk # already purged by other thread
           @queue.unshift(chunk)
-          log.debug "chunk taken back", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: chunk.metadata
+          log.trace "chunk taken back", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: chunk.metadata
           @queued_num[chunk.metadata] += 1 # BUG if nil
         end
         true
@@ -397,7 +397,7 @@ module Fluent
           return nil unless chunk # purged by other threads
 
           metadata = chunk.metadata
-          log.debug "purging a chunk", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata
+          log.trace "purging a chunk", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata
           begin
             bytesize = chunk.bytesize
             chunk.purge
@@ -410,18 +410,18 @@ module Fluent
           if metadata && !@stage[metadata] && (!@queued_num[metadata] || @queued_num[metadata] < 1)
             @metadata_list.delete(metadata)
           end
-          log.debug "chunk purged", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata
+          log.trace "chunk purged", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata
         end
         nil
       end
 
       def clear_queue!
-        log.debug "clearing queue", instance: self.object_id
+        log.trace "clearing queue", instance: self.object_id
         synchronize do
           until @queue.empty?
             begin
               q = @queue.shift
-              log.debug("purging a chunk in queue"){ {id: dump_unique_id_hex(chunk.unique_id), bytesize: chunk.bytesize, size: chunk.size} }
+              log.trace("purging a chunk in queue"){ {id: dump_unique_id_hex(chunk.unique_id), bytesize: chunk.bytesize, size: chunk.size} }
               q.purge
             rescue => e
               log.error "unexpected error while clearing buffer queue", error_class: e.class, error: e
@@ -433,11 +433,11 @@ module Fluent
       end
 
       def chunk_size_over?(chunk)
-        chunk.bytesize > @chunk_limit_size || (@chunk_records_limit && chunk.size > @chunk_records_limit)
+        chunk.bytesize > @chunk_limit_size || (@chunk_limit_records && chunk.size > @chunk_limit_records)
       end
 
       def chunk_size_full?(chunk)
-        chunk.bytesize >= @chunk_limit_size * @chunk_full_threshold || (@chunk_records_limit && chunk.size >= @chunk_records_limit * @chunk_full_threshold)
+        chunk.bytesize >= @chunk_limit_size * @chunk_full_threshold || (@chunk_limit_records && chunk.size >= @chunk_limit_records * @chunk_full_threshold)
       end
 
       class ShouldRetry < StandardError; end
data/lib/fluent/plugin/buffer/file_chunk.rb
@@ -42,12 +42,12 @@ module Fluent
 
       def initialize(metadata, path, mode, perm: system_config.file_permission || FILE_PERMISSION, compress: :text)
         super(metadata, compress: compress)
-        @permission = perm
+        @permission = perm.is_a?(String) ? perm.to_i(8) : perm
         @bytesize = @size = @adding_bytes = @adding_size = 0
         @meta = nil
 
         case mode
-        when :create then create_new_chunk(path, perm)
+        when :create then create_new_chunk(path, @permission)
         when :staged then load_existing_staged_chunk(path)
         when :queued then load_existing_enqueued_chunk(path)
         else
data/lib/fluent/plugin/compressable.rb
@@ -14,6 +14,7 @@
 # limitations under the License.
 #
 
+require 'stringio'
 require 'zlib'
 
 module Fluent
data/lib/fluent/plugin/filter_record_transformer.rb
@@ -101,6 +101,8 @@ module Fluent::Plugin
           if @renew_time_key && new_record.has_key?(@renew_time_key)
             time = Fluent::EventTime.from_time(Time.at(new_record[@renew_time_key].to_f))
           end
+          @remove_keys.each {|k| new_record.delete(k) } if @remove_keys
+
           new_es.add(time, new_record)
         rescue => e
           router.emit_error_event(tag, time, record, e)
@@ -129,7 +131,6 @@ module Fluent::Plugin
       new_record = @renew_record ? {} : record.dup
       @keep_keys.each {|k| new_record[k] = record[k]} if @keep_keys and @renew_record
       new_record.merge!(expand_placeholders(@map, placeholders))
-      @remove_keys.each {|k| new_record.delete(k) } if @remove_keys
 
       new_record
     end
@@ -307,14 +308,10 @@ module Fluent::Plugin
         raise "failed to expand `#{str}` : error = #{e}"
       end
 
-      class CleanroomExpander
+      class CleanroomExpander < BasicObject
         def expand(__str_to_eval__, tag, time, record, tag_parts, tag_prefix, tag_suffix, hostname)
           instance_eval(__str_to_eval__)
         end
-
-        (Object.instance_methods).each do |m|
-          undef_method m unless m.to_s =~ /^__|respond_to_missing\?|object_id|public_methods|instance_eval|method_missing|define_singleton_method|respond_to\?|new_ostruct_member/
-        end
       end
     end
 end
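
Taken together, these three hunks move the remove_keys deletion out of reform and into filter_stream (after the renew_time_key lookup), and rebuild CleanroomExpander on BasicObject instead of undefining Object's methods one by one. A hedged filter sketch touching both paths; the tag pattern, keys, and placeholders are illustrative:

    <filter app.**>
      @type record_transformer
      enable_ruby
      remove_keys password        # deleted from the record after <record> expansion
      <record>
        host_tag ${tag_parts[0]}-${hostname}   # evaluated inside CleanroomExpander when enable_ruby is on
      </record>
    </filter>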
data/lib/fluent/plugin/formatter_csv.rb
@@ -29,6 +29,7 @@ module Fluent
       # "array" looks good for type of :fields, but this implementation removes tailing comma
       # TODO: Is it needed to support tailing comma?
       config_param :fields, :array, value_type: :string
+      config_param :add_newline, :bool, default: true
 
       def configure(conf)
         super
@@ -42,7 +43,9 @@ module Fluent
         row = @fields.map do |key|
           record[key]
         end
-        CSV.generate_line(row, @generate_opts)
+        line = CSV.generate_line(row, @generate_opts)
+        line.chomp! unless @add_newline
+        line
       end
     end
   end
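
add_newline defaults to true, so existing output is unchanged; setting it to false drops the trailing newline, and the same parameter is added to the hash and json formatters below. A hedged sketch of a <format> section for an output that supports formatter sections; the field list is illustrative:

    <format>
      @type csv
      fields host,method,path
      add_newline false   # emit the CSV row without the trailing newline
    </format>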
data/lib/fluent/plugin/formatter_hash.rb
@@ -21,8 +21,12 @@ module Fluent
     class HashFormatter < Formatter
       Plugin.register_formatter('hash', self)
 
+      config_param :add_newline, :bool, default: true
+
       def format(tag, time, record)
-        "#{record.to_s}\n"
+        line = record.to_s
+        line << "\n".freeze if @add_newline
+        line
       end
     end
   end
data/lib/fluent/plugin/formatter_json.rb
@@ -23,6 +23,7 @@ module Fluent
       Plugin.register_formatter('json', self)
 
       config_param :json_parser, :string, default: 'oj'
+      config_param :add_newline, :bool, default: true
 
       def configure(conf)
         super
@@ -35,11 +36,20 @@ module Fluent
       rescue LoadError
         @dump_proc = Yajl.method(:dump)
       end
+
+        # format json is used on various highload environment, so re-define method to skip if check
+        unless @add_newline
+          define_singleton_method(:format, method(:format_without_nl))
+        end
       end
 
       def format(tag, time, record)
         "#{@dump_proc.call(record)}\n"
       end
+
+      def format_without_nl(tag, time, record)
+        @dump_proc.call(record)
+      end
     end
   end
 end