fluentd 0.14.5-x86-mingw32 → 0.14.7-x86-mingw32

Files changed (103)
  1. checksums.yaml +4 -4
  2. data/ChangeLog +55 -0
  3. data/bin/fluent-binlog-reader +7 -0
  4. data/example/in_dummy_with_compression.conf +23 -0
  5. data/lib/fluent/agent.rb +8 -12
  6. data/lib/fluent/command/binlog_reader.rb +234 -0
  7. data/lib/fluent/command/fluentd.rb +17 -1
  8. data/lib/fluent/compat/file_util.rb +1 -1
  9. data/lib/fluent/compat/output.rb +5 -1
  10. data/lib/fluent/config/configure_proxy.rb +18 -3
  11. data/lib/fluent/config/element.rb +1 -1
  12. data/lib/fluent/config/section.rb +1 -1
  13. data/lib/fluent/config/v1_parser.rb +1 -1
  14. data/lib/fluent/env.rb +1 -0
  15. data/lib/fluent/event.rb +49 -2
  16. data/lib/fluent/event_router.rb +6 -2
  17. data/lib/fluent/label.rb +8 -0
  18. data/lib/fluent/log.rb +30 -1
  19. data/lib/fluent/plugin.rb +1 -1
  20. data/lib/fluent/plugin/base.rb +3 -0
  21. data/lib/fluent/plugin/buf_file.rb +2 -2
  22. data/lib/fluent/plugin/buf_memory.rb +1 -1
  23. data/lib/fluent/plugin/buffer.rb +12 -2
  24. data/lib/fluent/plugin/buffer/chunk.rb +68 -7
  25. data/lib/fluent/plugin/buffer/file_chunk.rb +4 -4
  26. data/lib/fluent/plugin/buffer/memory_chunk.rb +4 -4
  27. data/lib/fluent/plugin/compressable.rb +91 -0
  28. data/lib/fluent/plugin/filter_grep.rb +4 -4
  29. data/lib/fluent/plugin/formatter.rb +2 -2
  30. data/lib/fluent/plugin/formatter_json.rb +2 -1
  31. data/lib/fluent/plugin/formatter_out_file.rb +3 -30
  32. data/lib/fluent/plugin/in_forward.rb +6 -5
  33. data/lib/fluent/plugin/in_monitor_agent.rb +7 -21
  34. data/lib/fluent/plugin/in_syslog.rb +1 -1
  35. data/lib/fluent/plugin/in_tail.rb +11 -2
  36. data/lib/fluent/plugin/multi_output.rb +63 -3
  37. data/lib/fluent/plugin/out_exec.rb +1 -1
  38. data/lib/fluent/plugin/out_file.rb +5 -1
  39. data/lib/fluent/plugin/out_forward.rb +17 -5
  40. data/lib/fluent/plugin/out_stdout.rb +2 -1
  41. data/lib/fluent/plugin/output.rb +205 -19
  42. data/lib/fluent/plugin/parser.rb +5 -49
  43. data/lib/fluent/plugin/parser_apache2.rb +1 -1
  44. data/lib/fluent/plugin/parser_json.rb +4 -4
  45. data/lib/fluent/plugin/parser_multiline.rb +5 -5
  46. data/lib/fluent/plugin/parser_regexp.rb +1 -2
  47. data/lib/fluent/plugin/parser_syslog.rb +2 -2
  48. data/lib/fluent/plugin/storage_local.rb +2 -1
  49. data/lib/fluent/plugin_helper.rb +1 -0
  50. data/lib/fluent/plugin_helper/compat_parameters.rb +39 -21
  51. data/lib/fluent/plugin_helper/extract.rb +92 -0
  52. data/lib/fluent/plugin_helper/inject.rb +10 -12
  53. data/lib/fluent/plugin_helper/thread.rb +23 -3
  54. data/lib/fluent/registry.rb +1 -1
  55. data/lib/fluent/root_agent.rb +2 -1
  56. data/lib/fluent/supervisor.rb +28 -8
  57. data/lib/fluent/test/base.rb +0 -7
  58. data/lib/fluent/test/driver/base.rb +1 -0
  59. data/lib/fluent/test/driver/output.rb +3 -0
  60. data/lib/fluent/test/helpers.rb +18 -0
  61. data/lib/fluent/test/input_test.rb +4 -2
  62. data/lib/fluent/test/log.rb +3 -1
  63. data/lib/fluent/time.rb +232 -1
  64. data/lib/fluent/timezone.rb +1 -1
  65. data/lib/fluent/version.rb +1 -1
  66. data/test/command/test_binlog_reader.rb +351 -0
  67. data/test/config/test_config_parser.rb +6 -0
  68. data/test/config/test_configurable.rb +47 -1
  69. data/test/helper.rb +0 -1
  70. data/test/plugin/test_buffer.rb +22 -2
  71. data/test/plugin/test_buffer_chunk.rb +34 -4
  72. data/test/plugin/test_buffer_file_chunk.rb +73 -0
  73. data/test/plugin/test_buffer_memory_chunk.rb +73 -0
  74. data/test/plugin/test_compressable.rb +81 -0
  75. data/test/plugin/test_formatter_json.rb +14 -1
  76. data/test/plugin/test_in_forward.rb +67 -3
  77. data/test/plugin/test_in_monitor_agent.rb +17 -1
  78. data/test/plugin/test_in_tail.rb +8 -8
  79. data/test/plugin/test_out_file.rb +0 -8
  80. data/test/plugin/test_out_forward.rb +85 -0
  81. data/test/plugin/test_out_secondary_file.rb +20 -12
  82. data/test/plugin/test_out_stdout.rb +11 -10
  83. data/test/plugin/test_output.rb +234 -0
  84. data/test/plugin/test_output_as_buffered.rb +223 -0
  85. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  86. data/test/plugin/test_parser_json.rb +8 -0
  87. data/test/plugin/test_parser_regexp.rb +1 -1
  88. data/test/plugin_helper/test_child_process.rb +2 -2
  89. data/test/plugin_helper/test_extract.rb +195 -0
  90. data/test/plugin_helper/test_inject.rb +0 -7
  91. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  92. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  93. data/test/test_event.rb +186 -0
  94. data/test/test_event_router.rb +1 -1
  95. data/test/test_formatter.rb +0 -7
  96. data/test/test_log.rb +121 -0
  97. data/test/test_plugin_classes.rb +62 -0
  98. data/test/test_root_agent.rb +125 -0
  99. data/test/test_supervisor.rb +25 -2
  100. data/test/test_time_formatter.rb +103 -7
  101. data/test/test_time_parser.rb +211 -0
  102. metadata +22 -4
  103. data/test/plugin/test_parser_time.rb +0 -46

data/lib/fluent/plugin/out_forward.rb

@@ -80,6 +80,9 @@ module Fluent
  desc 'Enable client-side DNS round robin.'
  config_param :dns_round_robin, :bool, default: false # heartbeat_type 'udp' is not available for this

+ desc 'Compress buffered data.'
+ config_param :compress, :enum, list: [:text, :gzip], default: :text
+
  config_section :security, required: false, multi: false do
  desc 'The hostname'
  config_param :self_hostname, :string
@@ -137,6 +140,12 @@ module Fluent
  end
  end

+ if @compress == :gzip && @buffer.compress == :text
+ @buffer.compress = :gzip
+ elsif @compress == :text && @buffer.compress == :gzip
+ log.info "buffer is compressed. If you also want to save the bandwidth of a network, Add `compress` configuration in <match>"
+ end
+
  if @nodes.empty?
  raise ConfigError, "forward output plugin requires at least one <server> is required"
  end
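
Note on the new `compress` parameter: the configure-time logic above keeps the <match>-level setting and the buffer-level `compress` setting consistent. As a rough illustration (hypothetical constant, not in the source), the effective compression of buffered chunks per combination is:

    # Effective chunk compression for each (<match> compress, <buffer> compress) pair,
    # following the reconciliation above. Illustrative sketch only.
    EFFECTIVE_COMPRESSION = {
      [:text, :text] => :text,  # defaults: chunks stay uncompressed
      [:gzip, :text] => :gzip,  # `compress gzip` in <match> also switches the buffer to gzip
      [:gzip, :gzip] => :gzip,
      [:text, :gzip] => :gzip,  # buffer stays gzipped; log.info suggests adding `compress` to <match>
    }
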
@@ -331,6 +340,7 @@ module Fluent
  def initialize(sender, server, failure:)
  @sender = sender
  @log = sender.log
+ @compress = sender.compress

  @name = server.name
  @host = server.host
@@ -432,7 +442,7 @@ module Fluent
  raise ForwardOutputConnectionClosedError, "failed to establish connection with node #{@name}"
  end

- option = { 'size' => chunk.size_of_events }
+ option = { 'size' => chunk.size_of_events, 'compressed' => @compress }
  option['chunk'] = Base64.encode64(chunk.unique_id) if @sender.require_ack_response

  # out_forward always uses Raw32 type for content.
@@ -440,13 +450,15 @@

  sock.write @sender.forward_header # beginArray(3)
  sock.write tag.to_msgpack # 1. writeRaw(tag)
- sock.write [0xdb, chunk.size].pack('CN') # 2. beginRaw(size) raw32
- chunk.write_to(sock) # writeRawBody(packed_es)
+ chunk.open(compressed: @compress) do |chunk_io|
+ sock.write [0xdb, chunk_io.size].pack('CN') # 2. beginRaw(size) raw32
+ IO.copy_stream(chunk_io, sock) # writeRawBody(packed_es)
+ end
  sock.write option.to_msgpack # 3. writeOption(option)

  if @sender.require_ack_response
  # Waiting for a response here results in a decrease of throughput because a chunk queue is locked.
- # To avoid a decrease of troughput, it is necessary to prepare a list of chunks that wait for responses
+ # To avoid a decrease of throughput, it is necessary to prepare a list of chunks that wait for responses
  # and process them asynchronously.
  if IO.select([sock], nil, nil, @sender.ack_response_timeout)
  raw_data = begin
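
Taken together, the write sequence above emits one three-element msgpack frame per chunk, with the chunk body sent as a raw32 blob (now optionally gzipped) and the new 'compressed' flag carried in the option map. A minimal sketch of that framing, with placeholder values and the array header assumed from the beginArray(3) comment:

    require 'msgpack'

    body   = "packed-or-gzipped-event-stream".b        # placeholder for the buffered chunk content
    option = { 'size' => 120, 'compressed' => :gzip }  # 'chunk' => <Base64 id> is added when require_ack_response

    frame  = "".b
    frame << [0x93].pack('C')                  # beginArray(3), assumed value of @sender.forward_header
    frame << "app.access".to_msgpack           # 1. tag
    frame << [0xdb, body.bytesize].pack('CN')  # 2. raw32 header: 0xdb plus 4-byte big-endian length
    frame << body                              #    raw body (the msgpack event stream, gzipped when compress is :gzip)
    frame << option.to_msgpack                 # 3. option map, now carrying 'compressed'
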
@@ -510,7 +522,7 @@ module Fluent
  sock.close
  end
  when :udp
- @usock.send "\0", 0, Socket.pack_sockaddr_in(n.port, n.resolved_host)
+ @usock.send "\0", 0, Socket.pack_sockaddr_in(@port, resolved_host)
  when :none # :none doesn't use this class
  raise "BUG: heartbeat_type none must not use Node"
  else

data/lib/fluent/plugin/out_stdout.rb

@@ -23,6 +23,7 @@ module Fluent::Plugin
  helpers :inject, :formatter, :compat_parameters

  DEFAULT_FORMAT_TYPE = 'json'
+ TIME_FORMAT = '%Y-%m-%d %H:%M:%S.%9N %z'

  config_section :buffer do
  config_set_default :chunk_keys, ['tag']
@@ -69,7 +70,7 @@ module Fluent::Plugin

  def format(tag, time, record)
  record = inject_values_to_record(tag, time, record)
- "#{Time.at(time).localtime} #{tag}: #{@formatter.format(tag, time, record).chomp}\n"
+ "#{Time.at(time).localtime.strftime(TIME_FORMAT)} #{tag}: #{@formatter.format(tag, time, record).chomp}\n"
  end

  def write(chunk)
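
The practical effect of TIME_FORMAT is that out_stdout now prints the event time with nanosecond precision instead of relying on Time#to_s. For example (illustrative values):

    t = Time.now.localtime
    t.to_s                                  # old output, e.g. "2016-10-12 13:45:02 +0900"
    t.strftime('%Y-%m-%d %H:%M:%S.%9N %z')  # new TIME_FORMAT, e.g. "2016-10-12 13:45:02.123456789 +0900"
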

data/lib/fluent/plugin/output.rb

@@ -36,6 +36,7 @@ module Fluent

  CHUNK_KEY_PATTERN = /^[-_.@a-zA-Z0-9]+$/
  CHUNK_KEY_PLACEHOLDER_PATTERN = /\$\{[-_.@a-zA-Z0-9]+\}/
+ CHUNK_TAG_PLACEHOLDER_PATTERN = /\$\{(tag(?:\[\d+\])?)\}/

  CHUNKING_FIELD_WARN_NUM = 4

@@ -74,12 +75,12 @@ module Fluent
  config_param :retry_max_times, :integer, default: nil, desc: 'The maximum number of times to retry to flush while failing.'

  config_param :retry_secondary_threshold, :float, default: 0.8, desc: 'ratio of retry_timeout to switch to use secondary while failing.'
- # expornential backoff sequence will be initialized at the time of this threshold
+ # exponential backoff sequence will be initialized at the time of this threshold

  desc 'How to wait next retry to flush buffer.'
  config_param :retry_type, :enum, list: [:exponential_backoff, :periodic], default: :exponential_backoff
  ### Periodic -> fixed :retry_wait
- ### Exponencial backoff: k is number of retry times
+ ### Exponential backoff: k is number of retry times
  # c: constant factor, @retry_wait
  # b: base factor, @retry_exponential_backoff_base
  # k: times
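
For readers unfamiliar with the factors named in the comment: assuming the k-th retry waits roughly c * (b ** k) seconds (the formula itself is outside this hunk), a constant factor of 1 and a base factor of 2 give the familiar doubling sequence:

    # Illustration only; the real implementation adds randomization and caps such as retry_max_interval.
    c = 1.0   # constant factor (@retry_wait)
    b = 2.0   # base factor (@retry_exponential_backoff_base)
    (0..4).map { |k| c * (b ** k) }   # => [1.0, 2.0, 4.0, 8.0, 16.0]
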
@@ -118,6 +119,12 @@ module Fluent
  raise NotImplementedError, "BUG: output plugins MUST implement this method"
  end

+ def formatted_to_msgpack_binary
+ # To indicate custom format method (#format) returns msgpack binary or not.
+ # If #format returns msgpack binary, override this method to return true.
+ false
+ end
+
  def prefer_buffered_processing
  # override this method to return false only when all of these are true:
  # * plugin has both implementation for buffered and non-buffered methods
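
A sketch of how a plugin with a custom format is expected to use this new hook (hypothetical plugin, not part of this release): returning true keeps the msgpack streamer available for its chunks, as wired up further down in this file.

    require 'fluent/plugin/output'

    class SampleMsgpackOutput < Fluent::Plugin::Output
      Fluent::Plugin.register_output('sample_msgpack', self)

      # Custom format that already emits msgpack binary per event.
      def format(tag, time, record)
        [time.to_i, record].to_msgpack
      end

      # New hook: declare that formatted chunk content is msgpack,
      # so chunks can still be read back as event streams.
      def formatted_to_msgpack_binary
        true
      end

      def write(chunk)
        log.debug "flushing chunk #{dump_unique_id_hex(chunk.unique_id)}"
      end
    end
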
@@ -175,11 +182,13 @@ module Fluent
  @buffering = true
  end
  @custom_format = implement?(:custom_format)
+ @enable_msgpack_streamer = false # decided later

  @buffer = nil
  @secondary = nil
  @retry = nil
  @dequeued_chunks = nil
+ @output_enqueue_thread = nil
  @output_flush_threads = nil

  @simple_chunking = nil
@@ -326,6 +335,9 @@ module Fluent
  @buffering = prefer_buffered_processing
  if !@buffering && @buffer
  @buffer.terminate # it's not started, so terminate will be enough
+ # At here, this plugin works as non-buffered plugin.
+ # Un-assign @buffer not to show buffering metrics (e.g., in_monitor_agent)
+ @buffer = nil
  end
  end

@@ -336,6 +348,7 @@ module Fluent
  end

  @custom_format = implement?(:custom_format)
+ @enable_msgpack_streamer = @custom_format ? formatted_to_msgpack_binary : true
  @delayed_commit = if implement?(:buffered) && implement?(:delayed_commit)
  prefer_delayed_commit
  else
@@ -355,6 +368,9 @@ module Fluent

  @buffer.start

+ @output_enqueue_thread = nil
+ @output_enqueue_thread_running = true
+
  @output_flush_threads = []
  @output_flush_threads_mutex = Mutex.new
  @output_flush_threads_running = true
@@ -385,7 +401,7 @@ module Fluent

  unless @in_tests
  if @flush_mode == :interval || @chunk_key_time
- thread_create(:enqueue_thread, &method(:enqueue_thread_run))
+ @output_enqueue_thread = thread_create(:enqueue_thread, &method(:enqueue_thread_run))
  end
  end
  end
@@ -412,6 +428,12 @@ module Fluent
  force_flush
  end
  @buffer.before_shutdown
+ # Need to ensure to stop enqueueing ... after #shutdown, we cannot write any data
+ @output_enqueue_thread_running = false
+ if @output_enqueue_thread && @output_enqueue_thread.alive?
+ @output_enqueue_thread.wakeup
+ @output_enqueue_thread.join
+ end
  end

  super
@@ -483,12 +505,146 @@ module Fluent
  end
  end

+ def placeholder_validate!(name, str)
+ placeholder_validators(name, str).each do |v|
+ v.validate!
+ end
+ end
+
+ def placeholder_validators(name, str, time_key = (@chunk_key_time && @buffer_config.timekey), tag_key = @chunk_key_tag, chunk_keys = @chunk_keys)
+ validators = []
+
+ sec, title, example = get_placeholders_time(str)
+ if sec || time_key
+ validators << PlaceholderValidator.new(name, str, :time, {sec: sec, title: title, example: example, timekey: time_key})
+ end
+
+ parts = get_placeholders_tag(str)
+ if tag_key || !parts.empty?
+ validators << PlaceholderValidator.new(name, str, :tag, {parts: parts, tagkey: tag_key})
+ end
+
+ keys = get_placeholders_keys(str)
+ if chunk_keys && !chunk_keys.empty? || !keys.empty?
+ validators << PlaceholderValidator.new(name, str, :keys, {keys: keys, chunkkeys: chunk_keys})
+ end
+
+ validators
+ end
+
+ class PlaceholderValidator
+ attr_reader :name, :string, :type, :argument
+
+ def initialize(name, str, type, arg)
+ @name = name
+ @string = str
+ @type = type
+ raise ArgumentError, "invalid type:#{type}" if @type != :time && @type != :tag && @type != :keys
+ @argument = arg
+ end
+
+ def time?
+ @type == :time
+ end
+
+ def tag?
+ @type == :tag
+ end
+
+ def keys?
+ @type == :keys
+ end
+
+ def validate!
+ case @type
+ when :time then validate_time!
+ when :tag then validate_tag!
+ when :keys then validate_keys!
+ end
+ end
+
+ def validate_time!
+ sec = @argument[:sec]
+ title = @argument[:title]
+ example = @argument[:example]
+ timekey = @argument[:timekey]
+ if !sec && timekey
+ raise Fluent::ConfigError, "Parameter '#{name}' doesn't have timestamp placeholders for timekey #{timekey.to_i}"
+ end
+ if sec && !timekey
+ raise Fluent::ConfigError, "Parameter '#{name}' has timestamp placeholders, but chunk key 'time' is not configured"
+ end
+ if sec && timekey && timekey < sec
+ raise Fluent::ConfigError, "Parameter '#{@name}' doesn't have timestamp placeholder for #{title}('#{example}') for timekey #{timekey.to_i}"
+ end
+ end
+
+ def validate_tag!
+ parts = @argument[:parts]
+ tagkey = @argument[:tagkey]
+ if tagkey && parts.empty?
+ raise Fluent::ConfigError, "Parameter '#{@name}' doesn't have tag placeholder"
+ end
+ if !tagkey && !parts.empty?
+ raise Fluent::ConfigError, "Parameter '#{@name}' has tag placeholders, but chunk key 'tag' is not configured"
+ end
+ end
+
+ def validate_keys!
+ keys = @argument[:keys]
+ chunk_keys = @argument[:chunkkeys]
+ if (chunk_keys - keys).size > 0
+ not_specified = (chunk_keys - keys).sort
+ raise Fluent::ConfigError, "Parameter '#{@name}' doesn't have enough placeholders for keys #{not_specified.join(',')}"
+ end
+ if (keys - chunk_keys).size > 0
+ not_satisfied = (keys - chunk_keys).sort
+ raise Fluent::ConfigError, "Parameter '#{@name}' has placeholders, but chunk keys doesn't have keys #{not_satisfied.join(',')}"
+ end
+ end
+ end
+
+ TIME_KEY_PLACEHOLDER_THRESHOLDS = [
+ [1, :second, '%S'],
+ [60, :minute, '%M'],
+ [3600, :hour, '%H'],
+ [86400, :day, '%d'],
+ ]
+ TIMESTAMP_CHECK_BASE_TIME = Time.parse("2016-01-01 00:00:00 UTC")
+ # it's not validated to use timekey larger than 1 day
+ def get_placeholders_time(str)
+ base_str = TIMESTAMP_CHECK_BASE_TIME.strftime(str)
+ TIME_KEY_PLACEHOLDER_THRESHOLDS.each do |triple|
+ sec = triple.first
+ return triple if (TIMESTAMP_CHECK_BASE_TIME + sec).strftime(str) != base_str
+ end
+ nil
+ end
+
+ # -1 means whole tag
+ def get_placeholders_tag(str)
+ # [["tag"],["tag[0]"]]
+ parts = []
+ str.scan(CHUNK_TAG_PLACEHOLDER_PATTERN).map(&:first).each do |ph|
+ if ph == "tag"
+ parts << -1
+ elsif ph =~ /^tag\[(\d+)\]$/
+ parts << $1.to_i
+ end
+ end
+ parts.sort
+ end
+
+ def get_placeholders_keys(str)
+ str.scan(CHUNK_KEY_PLACEHOLDER_PATTERN).map{|ph| ph[2..-2]}.reject{|s| s == "tag"}.sort
+ end
+
  # TODO: optimize this code
  def extract_placeholders(str, metadata)
  if metadata.empty?
  str
  else
- rvalue = str
+ rvalue = str.dup
  # strftime formatting
  if @chunk_key_time # this section MUST be earlier than rest to use raw 'str'
  @output_time_formatter_cache[str] ||= Fluent::Timezone.formatter(@timekey_zone, str)
@@ -496,14 +652,18 @@ module Fluent
  end
  # ${tag}, ${tag[0]}, ${tag[1]}, ...
  if @chunk_key_tag
- if str =~ /\$\{tag\[\d+\]\}/
- hash = {'${tag}' => metadata.tag}
+ if str.include?('${tag}')
+ rvalue = rvalue.gsub('${tag}', metadata.tag)
+ end
+ if str =~ CHUNK_TAG_PLACEHOLDER_PATTERN
+ hash = {}
  metadata.tag.split('.').each_with_index do |part, i|
  hash["${tag[#{i}]}"] = part
  end
- rvalue = rvalue.gsub(/\$\{tag(\[\d+\])?\}/, hash)
- elsif str.include?('${tag}')
- rvalue = rvalue.gsub('${tag}', metadata.tag)
+ rvalue = rvalue.gsub(CHUNK_TAG_PLACEHOLDER_PATTERN, hash)
+ end
+ if rvalue =~ CHUNK_TAG_PLACEHOLDER_PATTERN
+ log.warn "tag placeholder '#{$1}' not replaced. tag:#{metadata.tag}, template:#{str}"
  end
  end
  # ${a_chunk_key}, ...
@@ -514,6 +674,9 @@ module Fluent
  end
  rvalue = rvalue.gsub(CHUNK_KEY_PLACEHOLDER_PATTERN, hash)
  end
+ if rvalue =~ CHUNK_KEY_PLACEHOLDER_PATTERN
+ log.warn "chunk key placeholder '#{$1}' not replaced. templace:#{str}"
+ end
  rvalue
  end
  end
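
To make the new placeholder helpers concrete, here is an illustrative run (method calls shown bare for brevity; they are actually instance methods of Output):

    get_placeholders_time('/log/%Y%m%d/%H/data.log')  # => [3600, :hour, '%H']  (so timekey must be 3600 or larger)
    get_placeholders_tag('/log/${tag}/${tag[1]}')      # => [-1, 1]              (-1 stands for the whole tag)
    get_placeholders_keys('/log/${tag}/${key1}.log')   # => ["key1"]

    # extract_placeholders fills the same placeholders from chunk metadata: with
    # metadata.tag == "app.web.access", '${tag[1]}' becomes 'web' and '${tag}' becomes
    # 'app.web.access'; any placeholder left unreplaced is now reported via log.warn
    # instead of being silently passed through.
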
@@ -595,6 +758,13 @@ module Fluent
  end
  end

+ def metadata_for_test(tag, time, record)
+ raise "BUG: #metadata_for_test is available only when no actual metadata exists" unless @buffer.metadata_list.empty?
+ m = metadata(tag, time, record)
+ @buffer.metadata_list_clear!
+ m
+ end
+
  def execute_chunking(tag, es, enqueue: false)
  if @simple_chunking
  handle_stream_simple(tag, es, enqueue: enqueue)
@@ -641,7 +811,17 @@ module Fluent
  end

  FORMAT_MSGPACK_STREAM = ->(e){ e.to_msgpack_stream }
+ FORMAT_COMPRESSED_MSGPACK_STREAM = ->(e){ e.to_compressed_msgpack_stream }
  FORMAT_MSGPACK_STREAM_TIME_INT = ->(e){ e.to_msgpack_stream(time_int: true) }
+ FORMAT_COMPRESSED_MSGPACK_STREAM_TIME_INT = ->(e){ e.to_compressed_msgpack_stream(time_int: true) }
+
+ def generate_format_proc
+ if @buffer && @buffer.compress == :gzip
+ @time_as_integer ? FORMAT_COMPRESSED_MSGPACK_STREAM_TIME_INT : FORMAT_COMPRESSED_MSGPACK_STREAM
+ else
+ @time_as_integer ? FORMAT_MSGPACK_STREAM_TIME_INT : FORMAT_MSGPACK_STREAM
+ end
+ end

  # metadata_and_data is a Hash of:
  # (standard format) metadata => event stream
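
EventStream#to_compressed_msgpack_stream and the Compressable helper behind it live in files not shown in this excerpt (data/lib/fluent/event.rb, data/lib/fluent/plugin/compressable.rb); conceptually the result is the ordinary msgpack stream wrapped in gzip. A minimal sketch under that assumption:

    require 'msgpack'
    require 'zlib'
    require 'stringio'

    events = [[1451606400, {'message' => 'hello'}], [1451606401, {'message' => 'world'}]]
    packed = events.map { |time, record| [time, record].to_msgpack }.join  # plain msgpack stream

    buf = StringIO.new(''.b)
    gz  = Zlib::GzipWriter.new(buf)
    gz.write(packed)
    gz.close
    compressed = buf.string  # roughly what a compress gzip chunk would hold
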
@@ -651,7 +831,7 @@ module Fluent
  # `@buffer.write` will do this splitting.
  # For custom format, formatting will be done here. Custom formatting always requires
  # iteration of event stream, and it should be done just once even if total event stream size
- # is biggar than chunk_limit_size because of performance.
+ # is bigger than chunk_limit_size because of performance.
  def handle_stream_with_custom_format(tag, es, enqueue: false)
  meta_and_data = {}
  records = 0
@@ -669,7 +849,7 @@ module Fluent
  end

  def handle_stream_with_standard_format(tag, es, enqueue: false)
- format_proc = @time_as_integer ? FORMAT_MSGPACK_STREAM_TIME_INT : FORMAT_MSGPACK_STREAM
+ format_proc = generate_format_proc
  meta_and_data = {}
  records = 0
  es.each do |time, record|
@@ -697,7 +877,7 @@ module Fluent
  records += 1
  end
  else
- format_proc = @time_as_integer ? FORMAT_MSGPACK_STREAM_TIME_INT : FORMAT_MSGPACK_STREAM
+ format_proc = generate_format_proc
  data = es
  end
  write_guard do
@@ -793,7 +973,7 @@ module Fluent
  using_secondary = true
  end

- unless @custom_format
+ if @enable_msgpack_streamer
  chunk.extend ChunkMessagePackEventStreamer
  end

@@ -821,6 +1001,10 @@ module Fluent
  log.debug "taking back chunk for errors.", plugin_id: plugin_id, chunk: dump_unique_id_hex(chunk.unique_id)
  @buffer.takeback_chunk(chunk.unique_id)

+ if @under_plugin_development
+ raise
+ end
+
  @retry_mutex.synchronize do
  if @retry
  @counters_monitor.synchronize{ @num_errors += 1 }
@@ -941,7 +1125,7 @@ module Fluent
  log.debug "enqueue_thread actually running"

  begin
- while @output_flush_threads_running
+ while @output_enqueue_thread_running
  now_int = Time.now.to_i
  if @output_flush_interrupted
  sleep interval
@@ -965,16 +1149,18 @@ module Fluent
  @buffer.enqueue_all{ |metadata, chunk| metadata.timekey < current_timekey && metadata.timekey + timekey_unit + timekey_wait <= now_int }
  end
  rescue => e
- log.error "unexpected error while checking flushed chunks. ignored.", plugin_id: plugin_id, error_class: e.class, error: e
+ raise if @under_plugin_development
+ log.error "unexpected error while checking flushed chunks. ignored.", plugin_id: plugin_id, error: e
  log.error_backtrace
+ ensure
+ @output_enqueue_thread_waiting = false
+ @output_enqueue_thread_mutex.unlock
  end
- @output_enqueue_thread_waiting = false
- @output_enqueue_thread_mutex.unlock
  sleep interval
  end
  rescue => e
  # normal errors are rescued by inner begin-rescue clause.
- log.error "error on enqueue thread", plugin_id: plugin_id, error_class: e.class, error: e
+ log.error "error on enqueue thread", plugin_id: plugin_id, error: e
  log.error_backtrace
  raise
  end
@@ -1003,7 +1189,7 @@ module Fluent
  # next_flush_interval uses flush_thread_interval or flush_thread_burst_interval (or retrying)
  interval = next_flush_time.to_f - Time.now.to_f
  # TODO: if secondary && delayed-commit, next_flush_time will be much longer than expected (because @retry still exists)
- # @retry should be cleard if delayed commit is enabled? Or any other solution?
+ # @retry should be cleared if delayed commit is enabled? Or any other solution?
  state.next_time = Process.clock_gettime(clock_id) + interval
  end