fluentd 1.17.0-x86-mingw32 → 1.17.1-x86-mingw32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (259)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +46 -0
  3. data/README.md +1 -0
  4. data/SECURITY.md +2 -2
  5. data/fluent.conf +14 -14
  6. data/lib/fluent/command/cap_ctl.rb +4 -4
  7. data/lib/fluent/compat/call_super_mixin.rb +3 -3
  8. data/lib/fluent/compat/propagate_default.rb +4 -4
  9. data/lib/fluent/config/yaml_parser/parser.rb +4 -0
  10. data/lib/fluent/log/console_adapter.rb +4 -2
  11. data/lib/fluent/plugin/in_exec.rb +14 -2
  12. data/lib/fluent/plugin/in_http.rb +1 -1
  13. data/lib/fluent/plugin/in_sample.rb +13 -7
  14. data/lib/fluent/plugin/in_tail.rb +65 -23
  15. data/lib/fluent/plugin/out_copy.rb +1 -1
  16. data/lib/fluent/plugin/out_file.rb +8 -0
  17. data/lib/fluent/plugin/out_http.rb +12 -0
  18. data/lib/fluent/plugin/parser_json.rb +4 -12
  19. data/lib/fluent/plugin_helper/http_server/server.rb +1 -1
  20. data/lib/fluent/version.rb +1 -1
  21. data/templates/new_gem/fluent-plugin.gemspec.erb +6 -5
  22. metadata +25 -472
  23. data/.github/DISCUSSION_TEMPLATE/q-a-japanese.yml +0 -50
  24. data/.github/DISCUSSION_TEMPLATE/q-a.yml +0 -47
  25. data/.github/ISSUE_TEMPLATE/bug_report.yml +0 -71
  26. data/.github/ISSUE_TEMPLATE/config.yml +0 -5
  27. data/.github/ISSUE_TEMPLATE/feature_request.yml +0 -39
  28. data/.github/ISSUE_TEMPLATE.md +0 -17
  29. data/.github/PULL_REQUEST_TEMPLATE.md +0 -14
  30. data/.github/workflows/stale-actions.yml +0 -24
  31. data/.github/workflows/test-ruby-head.yml +0 -31
  32. data/.github/workflows/test.yml +0 -32
  33. data/.gitignore +0 -30
  34. data/Gemfile +0 -9
  35. data/fluentd.gemspec +0 -62
  36. data/test/command/test_binlog_reader.rb +0 -362
  37. data/test/command/test_ca_generate.rb +0 -70
  38. data/test/command/test_cap_ctl.rb +0 -100
  39. data/test/command/test_cat.rb +0 -128
  40. data/test/command/test_ctl.rb +0 -56
  41. data/test/command/test_fluentd.rb +0 -1291
  42. data/test/command/test_plugin_config_formatter.rb +0 -397
  43. data/test/command/test_plugin_generator.rb +0 -109
  44. data/test/compat/test_calls_super.rb +0 -166
  45. data/test/compat/test_parser.rb +0 -92
  46. data/test/config/assertions.rb +0 -42
  47. data/test/config/test_config_parser.rb +0 -551
  48. data/test/config/test_configurable.rb +0 -1784
  49. data/test/config/test_configure_proxy.rb +0 -604
  50. data/test/config/test_dsl.rb +0 -415
  51. data/test/config/test_element.rb +0 -518
  52. data/test/config/test_literal_parser.rb +0 -309
  53. data/test/config/test_plugin_configuration.rb +0 -56
  54. data/test/config/test_section.rb +0 -191
  55. data/test/config/test_system_config.rb +0 -195
  56. data/test/config/test_types.rb +0 -408
  57. data/test/counter/test_client.rb +0 -563
  58. data/test/counter/test_error.rb +0 -44
  59. data/test/counter/test_mutex_hash.rb +0 -179
  60. data/test/counter/test_server.rb +0 -589
  61. data/test/counter/test_store.rb +0 -258
  62. data/test/counter/test_validator.rb +0 -137
  63. data/test/helper.rb +0 -155
  64. data/test/helpers/fuzzy_assert.rb +0 -89
  65. data/test/helpers/process_extenstion.rb +0 -33
  66. data/test/log/test_console_adapter.rb +0 -117
  67. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  68. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  69. data/test/plugin/data/2010/01/20100102.log +0 -0
  70. data/test/plugin/data/log/bar +0 -0
  71. data/test/plugin/data/log/foo/bar.log +0 -0
  72. data/test/plugin/data/log/foo/bar2 +0 -0
  73. data/test/plugin/data/log/test.log +0 -0
  74. data/test/plugin/data/log_numeric/01.log +0 -0
  75. data/test/plugin/data/log_numeric/02.log +0 -0
  76. data/test/plugin/data/log_numeric/12.log +0 -0
  77. data/test/plugin/data/log_numeric/14.log +0 -0
  78. data/test/plugin/data/sd_file/config +0 -11
  79. data/test/plugin/data/sd_file/config.json +0 -17
  80. data/test/plugin/data/sd_file/config.yaml +0 -11
  81. data/test/plugin/data/sd_file/config.yml +0 -11
  82. data/test/plugin/data/sd_file/invalid_config.yml +0 -7
  83. data/test/plugin/in_tail/test_fifo.rb +0 -121
  84. data/test/plugin/in_tail/test_io_handler.rb +0 -150
  85. data/test/plugin/in_tail/test_position_file.rb +0 -346
  86. data/test/plugin/out_forward/test_ack_handler.rb +0 -140
  87. data/test/plugin/out_forward/test_connection_manager.rb +0 -145
  88. data/test/plugin/out_forward/test_handshake_protocol.rb +0 -112
  89. data/test/plugin/out_forward/test_load_balancer.rb +0 -106
  90. data/test/plugin/out_forward/test_socket_cache.rb +0 -174
  91. data/test/plugin/test_bare_output.rb +0 -131
  92. data/test/plugin/test_base.rb +0 -247
  93. data/test/plugin/test_buf_file.rb +0 -1314
  94. data/test/plugin/test_buf_file_single.rb +0 -898
  95. data/test/plugin/test_buf_memory.rb +0 -42
  96. data/test/plugin/test_buffer.rb +0 -1493
  97. data/test/plugin/test_buffer_chunk.rb +0 -209
  98. data/test/plugin/test_buffer_file_chunk.rb +0 -871
  99. data/test/plugin/test_buffer_file_single_chunk.rb +0 -611
  100. data/test/plugin/test_buffer_memory_chunk.rb +0 -339
  101. data/test/plugin/test_compressable.rb +0 -87
  102. data/test/plugin/test_file_util.rb +0 -96
  103. data/test/plugin/test_filter.rb +0 -368
  104. data/test/plugin/test_filter_grep.rb +0 -697
  105. data/test/plugin/test_filter_parser.rb +0 -731
  106. data/test/plugin/test_filter_record_transformer.rb +0 -577
  107. data/test/plugin/test_filter_stdout.rb +0 -207
  108. data/test/plugin/test_formatter_csv.rb +0 -136
  109. data/test/plugin/test_formatter_hash.rb +0 -38
  110. data/test/plugin/test_formatter_json.rb +0 -61
  111. data/test/plugin/test_formatter_ltsv.rb +0 -70
  112. data/test/plugin/test_formatter_msgpack.rb +0 -28
  113. data/test/plugin/test_formatter_out_file.rb +0 -116
  114. data/test/plugin/test_formatter_single_value.rb +0 -44
  115. data/test/plugin/test_formatter_tsv.rb +0 -76
  116. data/test/plugin/test_in_debug_agent.rb +0 -49
  117. data/test/plugin/test_in_exec.rb +0 -261
  118. data/test/plugin/test_in_forward.rb +0 -1178
  119. data/test/plugin/test_in_gc_stat.rb +0 -62
  120. data/test/plugin/test_in_http.rb +0 -1124
  121. data/test/plugin/test_in_monitor_agent.rb +0 -922
  122. data/test/plugin/test_in_object_space.rb +0 -66
  123. data/test/plugin/test_in_sample.rb +0 -190
  124. data/test/plugin/test_in_syslog.rb +0 -505
  125. data/test/plugin/test_in_tail.rb +0 -3429
  126. data/test/plugin/test_in_tcp.rb +0 -328
  127. data/test/plugin/test_in_udp.rb +0 -296
  128. data/test/plugin/test_in_unix.rb +0 -181
  129. data/test/plugin/test_input.rb +0 -137
  130. data/test/plugin/test_metadata.rb +0 -89
  131. data/test/plugin/test_metrics.rb +0 -294
  132. data/test/plugin/test_metrics_local.rb +0 -96
  133. data/test/plugin/test_multi_output.rb +0 -204
  134. data/test/plugin/test_out_copy.rb +0 -308
  135. data/test/plugin/test_out_exec.rb +0 -312
  136. data/test/plugin/test_out_exec_filter.rb +0 -606
  137. data/test/plugin/test_out_file.rb +0 -1038
  138. data/test/plugin/test_out_forward.rb +0 -1349
  139. data/test/plugin/test_out_http.rb +0 -557
  140. data/test/plugin/test_out_null.rb +0 -105
  141. data/test/plugin/test_out_relabel.rb +0 -28
  142. data/test/plugin/test_out_roundrobin.rb +0 -146
  143. data/test/plugin/test_out_secondary_file.rb +0 -458
  144. data/test/plugin/test_out_stdout.rb +0 -205
  145. data/test/plugin/test_out_stream.rb +0 -103
  146. data/test/plugin/test_output.rb +0 -1334
  147. data/test/plugin/test_output_as_buffered.rb +0 -2024
  148. data/test/plugin/test_output_as_buffered_backup.rb +0 -363
  149. data/test/plugin/test_output_as_buffered_compress.rb +0 -179
  150. data/test/plugin/test_output_as_buffered_overflow.rb +0 -250
  151. data/test/plugin/test_output_as_buffered_retries.rb +0 -966
  152. data/test/plugin/test_output_as_buffered_secondary.rb +0 -882
  153. data/test/plugin/test_output_as_standard.rb +0 -374
  154. data/test/plugin/test_owned_by.rb +0 -34
  155. data/test/plugin/test_parser.rb +0 -399
  156. data/test/plugin/test_parser_apache.rb +0 -42
  157. data/test/plugin/test_parser_apache2.rb +0 -47
  158. data/test/plugin/test_parser_apache_error.rb +0 -45
  159. data/test/plugin/test_parser_csv.rb +0 -200
  160. data/test/plugin/test_parser_json.rb +0 -244
  161. data/test/plugin/test_parser_labeled_tsv.rb +0 -160
  162. data/test/plugin/test_parser_msgpack.rb +0 -127
  163. data/test/plugin/test_parser_multiline.rb +0 -111
  164. data/test/plugin/test_parser_nginx.rb +0 -88
  165. data/test/plugin/test_parser_none.rb +0 -52
  166. data/test/plugin/test_parser_regexp.rb +0 -284
  167. data/test/plugin/test_parser_syslog.rb +0 -650
  168. data/test/plugin/test_parser_tsv.rb +0 -122
  169. data/test/plugin/test_sd_file.rb +0 -228
  170. data/test/plugin/test_sd_srv.rb +0 -230
  171. data/test/plugin/test_storage.rb +0 -166
  172. data/test/plugin/test_storage_local.rb +0 -335
  173. data/test/plugin/test_string_util.rb +0 -26
  174. data/test/plugin_helper/data/cert/cert-key.pem +0 -27
  175. data/test/plugin_helper/data/cert/cert-with-CRLF.pem +0 -19
  176. data/test/plugin_helper/data/cert/cert-with-no-newline.pem +0 -19
  177. data/test/plugin_helper/data/cert/cert.pem +0 -19
  178. data/test/plugin_helper/data/cert/cert_chains/ca-cert-key.pem +0 -27
  179. data/test/plugin_helper/data/cert/cert_chains/ca-cert.pem +0 -20
  180. data/test/plugin_helper/data/cert/cert_chains/cert-key.pem +0 -27
  181. data/test/plugin_helper/data/cert/cert_chains/cert.pem +0 -40
  182. data/test/plugin_helper/data/cert/empty.pem +0 -0
  183. data/test/plugin_helper/data/cert/generate_cert.rb +0 -125
  184. data/test/plugin_helper/data/cert/with_ca/ca-cert-key-pass.pem +0 -30
  185. data/test/plugin_helper/data/cert/with_ca/ca-cert-key.pem +0 -27
  186. data/test/plugin_helper/data/cert/with_ca/ca-cert-pass.pem +0 -20
  187. data/test/plugin_helper/data/cert/with_ca/ca-cert.pem +0 -20
  188. data/test/plugin_helper/data/cert/with_ca/cert-key-pass.pem +0 -30
  189. data/test/plugin_helper/data/cert/with_ca/cert-key.pem +0 -27
  190. data/test/plugin_helper/data/cert/with_ca/cert-pass.pem +0 -21
  191. data/test/plugin_helper/data/cert/with_ca/cert.pem +0 -21
  192. data/test/plugin_helper/data/cert/without_ca/cert-key-pass.pem +0 -30
  193. data/test/plugin_helper/data/cert/without_ca/cert-key.pem +0 -27
  194. data/test/plugin_helper/data/cert/without_ca/cert-pass.pem +0 -20
  195. data/test/plugin_helper/data/cert/without_ca/cert.pem +0 -20
  196. data/test/plugin_helper/http_server/test_app.rb +0 -65
  197. data/test/plugin_helper/http_server/test_route.rb +0 -32
  198. data/test/plugin_helper/service_discovery/test_manager.rb +0 -93
  199. data/test/plugin_helper/service_discovery/test_round_robin_balancer.rb +0 -21
  200. data/test/plugin_helper/test_cert_option.rb +0 -25
  201. data/test/plugin_helper/test_child_process.rb +0 -862
  202. data/test/plugin_helper/test_compat_parameters.rb +0 -358
  203. data/test/plugin_helper/test_event_emitter.rb +0 -80
  204. data/test/plugin_helper/test_event_loop.rb +0 -52
  205. data/test/plugin_helper/test_extract.rb +0 -194
  206. data/test/plugin_helper/test_formatter.rb +0 -255
  207. data/test/plugin_helper/test_http_server_helper.rb +0 -372
  208. data/test/plugin_helper/test_inject.rb +0 -561
  209. data/test/plugin_helper/test_metrics.rb +0 -137
  210. data/test/plugin_helper/test_parser.rb +0 -264
  211. data/test/plugin_helper/test_record_accessor.rb +0 -238
  212. data/test/plugin_helper/test_retry_state.rb +0 -1006
  213. data/test/plugin_helper/test_server.rb +0 -1895
  214. data/test/plugin_helper/test_service_discovery.rb +0 -165
  215. data/test/plugin_helper/test_socket.rb +0 -146
  216. data/test/plugin_helper/test_storage.rb +0 -542
  217. data/test/plugin_helper/test_thread.rb +0 -164
  218. data/test/plugin_helper/test_timer.rb +0 -130
  219. data/test/scripts/exec_script.rb +0 -32
  220. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +0 -7
  221. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +0 -7
  222. data/test/scripts/fluent/plugin/formatter_known.rb +0 -8
  223. data/test/scripts/fluent/plugin/out_test.rb +0 -81
  224. data/test/scripts/fluent/plugin/out_test2.rb +0 -80
  225. data/test/scripts/fluent/plugin/parser_known.rb +0 -4
  226. data/test/test_capability.rb +0 -74
  227. data/test/test_clock.rb +0 -164
  228. data/test/test_config.rb +0 -369
  229. data/test/test_configdsl.rb +0 -148
  230. data/test/test_daemonizer.rb +0 -91
  231. data/test/test_engine.rb +0 -203
  232. data/test/test_event.rb +0 -531
  233. data/test/test_event_router.rb +0 -348
  234. data/test/test_event_time.rb +0 -199
  235. data/test/test_file_wrapper.rb +0 -53
  236. data/test/test_filter.rb +0 -121
  237. data/test/test_fluent_log_event_router.rb +0 -99
  238. data/test/test_formatter.rb +0 -369
  239. data/test/test_input.rb +0 -31
  240. data/test/test_log.rb +0 -1076
  241. data/test/test_match.rb +0 -148
  242. data/test/test_mixin.rb +0 -351
  243. data/test/test_msgpack_factory.rb +0 -50
  244. data/test/test_oj_options.rb +0 -55
  245. data/test/test_output.rb +0 -278
  246. data/test/test_plugin.rb +0 -251
  247. data/test/test_plugin_classes.rb +0 -370
  248. data/test/test_plugin_helper.rb +0 -81
  249. data/test/test_plugin_id.rb +0 -119
  250. data/test/test_process.rb +0 -14
  251. data/test/test_root_agent.rb +0 -951
  252. data/test/test_static_config_analysis.rb +0 -177
  253. data/test/test_supervisor.rb +0 -821
  254. data/test/test_test_drivers.rb +0 -136
  255. data/test/test_time_formatter.rb +0 -301
  256. data/test/test_time_parser.rb +0 -362
  257. data/test/test_tls.rb +0 -65
  258. data/test/test_unique_id.rb +0 -47
  259. data/test/test_variable_store.rb +0 -65
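The listing above can be reproduced locally by fetching both platform gems and diffing the unpacked trees. A minimal sketch in Ruby (assuming the stock RubyGems `gem fetch` / `gem unpack` commands and a `diff` binary on PATH; this helper script is illustrative and not part of the registry tooling):

  #!/usr/bin/env ruby
  # Hypothetical helper: download and unpack both x86-mingw32 gems, then diff them.
  versions = %w[1.17.0 1.17.1]
  versions.each do |v|
    system("gem", "fetch", "fluentd", "-v", v, "--platform", "x86-mingw32") or abort("fetch #{v} failed")
    system("gem", "unpack", "fluentd-#{v}-x86-mingw32.gem") or abort("unpack #{v} failed")
  end
  # Recursive unified diff of the unpacked trees, in the same direction as this page.
  system("diff", "-ru",
         "fluentd-#{versions[0]}-x86-mingw32",
         "fluentd-#{versions[1]}-x86-mingw32")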
data/test/plugin/test_output_as_buffered_retries.rb (removed, 966 lines)
@@ -1,966 +0,0 @@
-require_relative '../helper'
-require 'fluent/plugin/output'
-require 'fluent/plugin/buffer'
-require 'fluent/event'
-
-require 'json'
-require 'time'
-require 'timeout'
-require 'timecop'
-
-module FluentPluginOutputAsBufferedRetryTest
-  class DummyBareOutput < Fluent::Plugin::Output
-    def register(name, &block)
-      instance_variable_set("@#{name}", block)
-    end
-  end
-  class DummySyncOutput < DummyBareOutput
-    def initialize
-      super
-      @process = nil
-    end
-    def process(tag, es)
-      @process ? @process.call(tag, es) : nil
-    end
-  end
-  class DummyFullFeatureOutput < DummyBareOutput
-    def initialize
-      super
-      @prefer_buffered_processing = nil
-      @prefer_delayed_commit = nil
-      @process = nil
-      @format = nil
-      @write = nil
-      @try_write = nil
-    end
-    def prefer_buffered_processing
-      @prefer_buffered_processing ? @prefer_buffered_processing.call : false
-    end
-    def prefer_delayed_commit
-      @prefer_delayed_commit ? @prefer_delayed_commit.call : false
-    end
-    def process(tag, es)
-      @process ? @process.call(tag, es) : nil
-    end
-    def format(tag, time, record)
-      @format ? @format.call(tag, time, record) : [tag, time, record].to_json
-    end
-    def write(chunk)
-      @write ? @write.call(chunk) : nil
-    end
-    def try_write(chunk)
-      @try_write ? @try_write.call(chunk) : nil
-    end
-  end
-  class DummyFullFeatureOutput2 < DummyFullFeatureOutput
-    def prefer_buffered_processing; true; end
-    def prefer_delayed_commit; super; end
-    def format(tag, time, record); super; end
-    def write(chunk); super; end
-    def try_write(chunk); super; end
-  end
-end
-
-class BufferedOutputRetryTest < Test::Unit::TestCase
-  def create_output(type=:full)
-    case type
-    when :bare then FluentPluginOutputAsBufferedRetryTest::DummyBareOutput.new
-    when :sync then FluentPluginOutputAsBufferedRetryTest::DummySyncOutput.new
-    when :full then FluentPluginOutputAsBufferedRetryTest::DummyFullFeatureOutput.new
-    else
-      raise ArgumentError, "unknown type: #{type}"
-    end
-  end
-  def create_metadata(timekey: nil, tag: nil, variables: nil)
-    Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
-  end
-  def waiting(seconds)
-    begin
-      Timeout.timeout(seconds) do
-        yield
-      end
-    rescue Timeout::Error
-      STDERR.print(*@i.log.out.logs)
-      raise
-    end
-  end
-  def dummy_event_stream
-    Fluent::ArrayEventStream.new([
-      [ event_time('2016-04-13 18:33:00'), {"name" => "moris", "age" => 36, "message" => "data1"} ],
-      [ event_time('2016-04-13 18:33:13'), {"name" => "moris", "age" => 36, "message" => "data2"} ],
-      [ event_time('2016-04-13 18:33:32'), {"name" => "moris", "age" => 36, "message" => "data3"} ],
-    ])
-  end
-  def get_log_time(msg, logs)
-    log_time = nil
-    log = logs.find{|l| l.include?(msg) }
-    if log && /^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4}) \[error\]/ =~ log
-      log_time = Time.parse($1)
-    end
-    log_time
-  end
-
-  setup do
-    @i = create_output
-  end
-
-  teardown do
-    if @i
-      @i.stop unless @i.stopped?
-      @i.before_shutdown unless @i.before_shutdown?
-      @i.shutdown unless @i.shutdown?
-      @i.after_shutdown unless @i.after_shutdown?
-      @i.close unless @i.closed?
-      @i.terminate unless @i.terminated?
-    end
-    Timecop.return
-  end
-
-  sub_test_case 'buffered output for retries with exponential backoff' do
-    test 'exponential backoff is default strategy for retries' do
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_randomize' => false,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.start
-      @i.after_start
-
-      assert_equal :exponential_backoff, @i.buffer_config.retry_type
-      assert_equal 1, @i.buffer_config.retry_wait
-      assert_equal 2.0, @i.buffer_config.retry_exponential_backoff_base
-      assert !@i.buffer_config.retry_randomize
-
-      now = Time.parse('2016-04-13 18:17:00 -0700')
-      Timecop.freeze( now )
-
-      retry_state = @i.retry_state( @i.buffer_config.retry_randomize )
-      retry_state.step
-      assert_equal (1 * (2 ** 1)), (retry_state.next_time - now)
-      retry_state.step
-      assert_equal (1 * (2 ** 2)), (retry_state.next_time - now)
-      retry_state.step
-      assert_equal (1 * (2 ** 3)), (retry_state.next_time - now)
-      retry_state.step
-      assert_equal (1 * (2 ** 4)), (retry_state.next_time - now)
-    end
-
-    test 'does retries correctly when #write fails' do
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_randomize' => false,
-        'retry_max_interval' => 60 * 60,
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:32 -0700')
-      Timecop.freeze( now )
-
-      @i.enqueue_thread_wait
-
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      now = @i.next_flush_time
-      Timecop.freeze( now )
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 1 }
-
-      assert{ @i.write_count > 1 }
-      assert{ @i.num_errors > 1 }
-    end
-
-    test 'max retry interval is limited by retry_max_interval' do
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_randomize' => false,
-        'retry_max_interval' => 60,
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:32 -0700')
-      Timecop.freeze( now )
-
-      @i.enqueue_thread_wait
-
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      10.times do
-        now = @i.next_flush_time
-        Timecop.freeze( now )
-        @i.flush_thread_wakeup
-        waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        assert{ @i.write_count > prev_write_count }
-        assert{ @i.num_errors > prev_num_errors }
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-      end
-      # exponential backoff interval: 1 * 2 ** 10 == 1024
-      # but it should be limited by retry_max_interval=60
-      assert_equal 60, (@i.next_flush_time - now)
-    end
-
-    test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
-      written_tags = []
-
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_randomize' => false,
-        'retry_timeout' => 3600,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      assert_equal 0, @i.write_count
-      assert_equal 0, @i.num_errors
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert{ @i.buffer.queue.size > 0 }
-      assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      first_failure = @i.retry.start
-
-      15.times do |i| # large enough
-        now = @i.next_flush_time
-        # p({i: i, now: now, diff: (now - Time.now)})
-        # * if loop count is 12:
-        # {:i=>0, :now=>2016-04-13 18:33:32 -0700, :diff=>1.0}
-        # {:i=>1, :now=>2016-04-13 18:33:34 -0700, :diff=>2.0}
-        # {:i=>2, :now=>2016-04-13 18:33:38 -0700, :diff=>4.0}
-        # {:i=>3, :now=>2016-04-13 18:33:46 -0700, :diff=>8.0}
-        # {:i=>4, :now=>2016-04-13 18:34:02 -0700, :diff=>16.0}
-        # {:i=>5, :now=>2016-04-13 18:34:34 -0700, :diff=>32.0}
-        # {:i=>6, :now=>2016-04-13 18:35:38 -0700, :diff=>64.0}
-        # {:i=>7, :now=>2016-04-13 18:37:46 -0700, :diff=>128.0}
-        # {:i=>8, :now=>2016-04-13 18:42:02 -0700, :diff=>256.0}
-        # {:i=>9, :now=>2016-04-13 18:50:34 -0700, :diff=>512.0}
-        # {:i=>10, :now=>2016-04-13 19:07:38 -0700, :diff=>1024.0}
-        # {:i=>11, :now=>2016-04-13 19:33:31 -0700, :diff=>1553.0} # clear_queue!
-
-        Timecop.freeze( now )
-        @i.enqueue_thread_wait
-        @i.flush_thread_wakeup
-        waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        assert{ @i.write_count > prev_write_count }
-        assert{ @i.num_errors > prev_num_errors }
-
-        break if @i.buffer.queue.size == 0
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-
-        assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-      end
-      assert{ now >= first_failure + 3600 }
-
-      assert{ @i.buffer.stage.size == 0 }
-      assert{ written_tags.all?{|t| t == 'test.tag.1' } }
-
-      @i.emit_events("test.tag.3", dummy_event_stream())
-
-      logs = @i.log.out.logs
-      assert{ logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") } }
-    end
-
-    test 'output plugin give retries up by retry_max_times, and clear queue in buffer' do
-      written_tags = []
-
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_randomize' => false,
-        'retry_max_times' => 10,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      assert_equal 0, @i.write_count
-      assert_equal 0, @i.num_errors
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert{ @i.buffer.queue.size > 0 }
-      assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      _first_failure = @i.retry.start
-
-      chunks = @i.buffer.queue.dup
-
-      20.times do |i| # large times enough
-        now = @i.next_flush_time
-
-        Timecop.freeze( now )
-        @i.enqueue_thread_wait
-        @i.flush_thread_wakeup
-        waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        assert{ @i.write_count > prev_write_count }
-        assert{ @i.num_errors > prev_num_errors }
-
-        break if @i.buffer.queue.size == 0
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-
-        assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-      end
-      assert{ @i.buffer.stage.size == 0 }
-      assert{ written_tags.all?{|t| t == 'test.tag.1' } }
-
-      @i.emit_events("test.tag.3", dummy_event_stream())
-
-      logs = @i.log.out.logs
-      assert{ logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
-
-      assert{ @i.buffer.queue.size == 0 }
-      assert{ @i.buffer.stage.size == 1 }
-      assert{ chunks.all?{|c| c.empty? } }
-    end
-
-    test 'output plugin limits queued chunks via queued_chunks_limit_size' do
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_randomize' => false,
-        'retry_max_times' => 7,
-        'queued_chunks_limit_size' => 2,
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing) { true }
-      @i.register(:format) { |tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write) { |chunk| raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze(now)
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze(now)
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(4) { Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert { @i.buffer.queue.size > 0 }
-      assert { @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-
-      assert { @i.write_count > 0 }
-      assert { @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      20.times do |i| # large times enough
-        now = @i.next_flush_time
-
-        Timecop.freeze(now)
-        @i.enqueue_thread_wait
-        @i.flush_thread_wakeup
-        waiting(4) { Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        @i.emit_events("test.tag.1", dummy_event_stream())
-        assert { @i.buffer.queue.size <= 2 }
-        assert { @i.buffer.stage.size == 1 } # all new data is stored into staged chunk
-
-        break if @i.buffer.queue.size == 0
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-      end
-    end
-  end
-
-  sub_test_case 'bufferd output for retries with periodical retry' do
-    test 'periodical retries should retry to write in failing status per retry_wait' do
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_type' => :periodic,
-        'retry_wait' => 3,
-        'retry_randomize' => false,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:32 -0700')
-      Timecop.freeze( now )
-
-      @i.enqueue_thread_wait
-
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      now = @i.next_flush_time
-      Timecop.freeze( now )
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 1 }
-
-      assert{ @i.write_count > 1 }
-      assert{ @i.num_errors > 1 }
-    end
-
-    test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
-      written_tags = []
-
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_type' => :periodic,
-        'retry_wait' => 30,
-        'retry_randomize' => false,
-        'retry_timeout' => 120,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      assert_equal 0, @i.write_count
-      assert_equal 0, @i.num_errors
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert{ @i.buffer.queue.size > 0 }
-      assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      first_failure = @i.retry.start
-
-      3.times do |i|
-        now = @i.next_flush_time
-
-        Timecop.freeze( now )
-        @i.enqueue_thread_wait
-        @i.flush_thread_wakeup
-        waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        assert{ @i.write_count > prev_write_count }
-        assert{ @i.num_errors > prev_num_errors }
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-      end
-
-      assert{ @i.next_flush_time >= first_failure + 120 }
-
-      assert{ @i.buffer.queue.size == 2 }
-      assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-      assert{ @i.buffer.stage.size == 0 }
-
-      assert{ written_tags.all?{|t| t == 'test.tag.1' } }
-
-      chunks = @i.buffer.queue.dup
-
-      @i.emit_events("test.tag.3", dummy_event_stream())
-
-      now = @i.next_flush_time
-      Timecop.freeze( now )
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-      logs = @i.log.out.logs
-
-      target_time = Time.parse("2016-04-13 18:35:31 -0700")
-      target_msg = "[error]: Hit limit for retries. dropping all chunks in the buffer queue."
-      assert{ logs.any?{|l| l.include?(target_msg) } }
-
-      log_time = get_log_time(target_msg, logs)
-      assert_equal target_time.localtime, log_time.localtime
-
-      assert{ @i.buffer.queue.size == 0 }
-      assert{ @i.buffer.stage.size == 1 }
-      assert{ chunks.all?{|c| c.empty? } }
-    end
-
-    test 'retry_max_times can limit maximum times for retries' do
-      written_tags = []
-
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_type' => :periodic,
-        'retry_wait' => 3,
-        'retry_randomize' => false,
-        'retry_max_times' => 10,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      assert_equal 0, @i.write_count
-      assert_equal 0, @i.num_errors
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert{ @i.buffer.queue.size > 0 }
-      assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      _first_failure = @i.retry.start
-
-      chunks = @i.buffer.queue.dup
-
-      20.times do |i|
-        now = @i.next_flush_time
-
-        Timecop.freeze( now )
-        @i.enqueue_thread_wait
-        @i.flush_thread_wakeup
-        waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        assert{ @i.write_count > prev_write_count }
-        assert{ @i.num_errors > prev_num_errors }
-
-        break if @i.buffer.queue.size == 0
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-
-        assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-      end
-      assert{ @i.buffer.stage.size == 0 }
-      assert{ written_tags.all?{|t| t == 'test.tag.1' } }
-
-
-      @i.emit_events("test.tag.3", dummy_event_stream())
-
-      logs = @i.log.out.logs
-      assert{ logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
-
-      assert{ @i.buffer.queue.size == 0 }
-      assert{ @i.buffer.stage.size == 1 }
-      assert{ chunks.all?{|c| c.empty? } }
-    end
-
-    test 'Do not retry when retry_max_times is 0' do
-      written_tags = []
-
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_type' => :periodic,
-        'retry_wait' => 1,
-        'retry_randomize' => false,
-        'retry_max_times' => 0,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      assert_equal(0, @i.write_count)
-      assert_equal(0, @i.num_errors)
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(2){ Thread.pass until @i.write_count == 1 && @i.num_errors == 1 }
-
-      assert(@i.write_count == 1)
-      assert(@i.num_errors == 1)
-      assert(@i.log.out.logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=0") })
-      assert(@i.buffer.queue.size == 0)
-      assert(@i.buffer.stage.size == 1)
-      assert(@i.buffer.queue.all?{|c| c.empty? })
-    end
-  end
-
-  sub_test_case 'buffered output configured as retry_forever' do
-    setup do
-      Fluent::Plugin.register_output('output_retries_secondary_test', FluentPluginOutputAsBufferedRetryTest::DummyFullFeatureOutput2)
-    end
-
-    test 'warning logs are generated if secondary section is configured' do
-      chunk_key = 'tag'
-      hash = {
-        'retry_forever' => true,
-        'retry_randomize' => false,
-      }
-      i = create_output()
-      i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash),config_element('secondary','', {'@type' => 'output_retries_secondary_test'})]))
-      logs = i.log.out.logs
-      assert { logs.any? { |l| l.include?("<secondary> with 'retry_forever', only unrecoverable errors are moved to secondary") } }
-    end
-
-    test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for exponential backoff' do
-      written_tags = []
-
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_type' => :exponential_backoff,
-        'retry_forever' => true,
-        'retry_randomize' => false,
-        'retry_timeout' => 3600,
-        'retry_max_times' => 10,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      assert_equal 0, @i.write_count
-      assert_equal 0, @i.num_errors
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert{ @i.buffer.queue.size > 0 }
-      assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      first_failure = @i.retry.start
-
-      15.times do |i|
-        now = @i.next_flush_time
-
-        Timecop.freeze( now + 1 )
-        @i.enqueue_thread_wait
-        @i.flush_thread_wakeup
-        waiting(4){ sleep 0.1 until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        assert{ @i.write_count > prev_write_count }
-        assert{ @i.num_errors > prev_num_errors }
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-      end
-
-      assert{ @i.buffer.queue.size == 2 }
-      assert{ @i.retry.steps > 10 }
-      assert{ now > first_failure + 3600 }
-    end
-
-    test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for periodical retries' do
-      written_tags = []
-
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_type' => :periodic,
-        'retry_forever' => true,
-        'retry_randomize' => false,
-        'retry_wait' => 30,
-        'retry_timeout' => 360,
-        'retry_max_times' => 10,
-        'queued_chunks_limit_size' => 100
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:31 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.2", dummy_event_stream())
-
-      assert_equal 0, @i.write_count
-      assert_equal 0, @i.num_errors
-
-      @i.enqueue_thread_wait
-      @i.flush_thread_wakeup
-      waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
-
-      assert{ @i.buffer.queue.size > 0 }
-      assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
-
-      assert{ @i.write_count > 0 }
-      assert{ @i.num_errors > 0 }
-
-      prev_write_count = @i.write_count
-      prev_num_errors = @i.num_errors
-
-      first_failure = @i.retry.start
-
-      15.times do |i|
-        now = @i.next_flush_time
-
-        Timecop.freeze( now + 1 )
-        @i.enqueue_thread_wait
-        @i.flush_thread_wakeup
-        waiting(4){ sleep 0.1 until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
-
-        assert{ @i.write_count > prev_write_count }
-        assert{ @i.num_errors > prev_num_errors }
-
-        prev_write_count = @i.write_count
-        prev_num_errors = @i.num_errors
-      end
-
-      assert{ @i.buffer.queue.size == 2 }
-      assert{ @i.retry.steps > 10 }
-      assert{ now > first_failure + 360 }
-    end
-  end
-
-  sub_test_case 'buffered output with delayed commit' do
-    test 'does retries correctly when #try_write fails' do
-      chunk_key = 'tag'
-      hash = {
-        'flush_interval' => 1,
-        'flush_thread_burst_interval' => 0.1,
-        'retry_randomize' => false,
-        'retry_max_interval' => 60 * 60,
-      }
-      @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
-      @i.register(:prefer_buffered_processing){ true }
-      @i.register(:prefer_delayed_commit){ true }
-      @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
-      @i.register(:try_write){|chunk| raise "yay, your #write must fail" }
-      @i.start
-      @i.after_start
-
-      @i.interrupt_flushes
-
-      now = Time.parse('2016-04-13 18:33:30 -0700')
-      Timecop.freeze( now )
-
-      @i.emit_events("test.tag.1", dummy_event_stream())
-
-      now = Time.parse('2016-04-13 18:33:32 -0700')
-      Timecop.freeze( now )
-
-      @i.enqueue_thread_wait
-
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 0 }
-      waiting(4) do
-        state = @i.instance_variable_get(:@output_flush_threads).first
-        state.thread.status == 'sleep'
-      end
-
-      assert(@i.write_count > 0)
-      assert(@i.num_errors > 0)
-
-      now = @i.next_flush_time
-      Timecop.freeze( now )
-      @i.flush_thread_wakeup
-      waiting(4){ Thread.pass until @i.write_count > 1 }
-      waiting(4) do
-        state = @i.instance_variable_get(:@output_flush_threads).first
-        state.thread.status == 'sleep'
-      end
-
-      assert(@i.write_count > 1)
-      assert(@i.num_errors > 1)
-    end
-  end
-end
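The bulk of this removed file exercised fluentd's buffered-output retry settings: with the default retry_type :exponential_backoff, the tests assert that after n retry steps the next flush is scheduled retry_wait * retry_exponential_backoff_base ** n seconds out, clamped to retry_max_interval, while retry_timeout and retry_max_times bound the whole sequence unless retry_forever is set. A standalone Ruby sketch of that wait schedule, mirroring the values asserted above (2, 4, 8, 16 seconds, and 1 * 2**10 == 1024 capped at retry_max_interval=60); it is illustrative only and is not fluentd's RetryState implementation:

  # Illustrative only: the exponential-backoff wait schedule the removed tests
  # assert (wait = retry_wait * base**n, optionally capped by retry_max_interval).
  def backoff_waits(steps:, retry_wait: 1, base: 2.0, retry_max_interval: nil)
    (1..steps).map do |n|
      wait = retry_wait * (base ** n)
      retry_max_interval ? [wait, retry_max_interval].min : wait
    end
  end

  p backoff_waits(steps: 4)                           # => [2.0, 4.0, 8.0, 16.0]
  p backoff_waits(steps: 11, retry_max_interval: 60)  # later entries capped at 60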