fluentd 1.16.0-x64-mingw-ucrt → 1.16.1-x64-mingw-ucrt

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: f9e448700741217b09469f92b1a37561487c01c9c7ecfd076cb8e62d4af9666d
- data.tar.gz: d6931b08a3b5d3f1682c98616ecf98a68b54626232d9353af23db8294ef05f4f
+ metadata.gz: d6632beecaf31a952039ff32a351d78fbb8557dad3daa3e1504a5743c1c3be28
+ data.tar.gz: 4c13b5ae7568abdbaee281ed304e7c09c245ed437abf3f5b81fef49675bfdbba
  SHA512:
- metadata.gz: 4f0e946b11ad1e975a3377df0635ab806edd2a2bb7acd1d09116362c209ac3a8c57eaf39d9c9436ad5dc71f2d7dab00350d085de6e70e3daec559a0171eab06d
- data.tar.gz: f49a6aa1450a15469968fe76261e530ef6d4b166719e4ae4ac2c67bed0273738c5d7e5e476c0c63bf259ed0d691bc96a3227d168486f2dfaac201338dd734111
+ metadata.gz: 7e224fbe03562be373b6f0f9e21aa9c8128b21ce440877f8563600a1fd04515b0b19e96a0d0604ba50a76ea47a3e8f63f2554ff0f0f93f81e3e749774d8037f8
+ data.tar.gz: 417580c1a7d60a3e8cfaa8646eb392418042b7b62c320d3a71602469aca328e399e6a50a73417eecf78d9c8de8a51c1fb943e664d3ff88e5ab168ac6408bf92b
@@ -1,5 +1,6 @@
  name: Bug Report
  description: Create a report with a procedure for reproducing the bug
+ labels: "waiting-for-triage"
  body:
  - type: markdown
  attributes:
@@ -1,5 +1,6 @@
  name: Feature request
  description: Suggest an idea for this project
+ labels: "waiting-for-triage"
  body:
  - type: markdown
  attributes:
.github/workflows/stale-actions.yml ADDED
@@ -0,0 +1,24 @@
+ name: "Mark or close stale issues and PRs"
+ on:
+   schedule:
+     - cron: "00 10 * * *"
+
+ jobs:
+   stale:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/stale@v8
+         with:
+           repo-token: ${{ secrets.GITHUB_TOKEN }}
+           days-before-stale: 30
+           days-before-close: 7
+           stale-issue-message: "This issue has been automatically marked as stale because it has been open 30 days with no activity. Remove stale label or comment or this issue will be closed in 7 days"
+           stale-pr-message: "This PR has been automatically marked as stale because it has been open 30 days with no activity. Remove stale label or comment or this PR will be closed in 7 days"
+           close-issue-message: "This issue was automatically closed because of stale in 7 days"
+           close-pr-message: "This PR was automatically closed because of stale in 7 days"
+           stale-pr-label: "stale"
+           stale-issue-label: "stale"
+           exempt-issue-labels: "waiting-for-triage,bug,enhancement,feature request,pending,work_in_progress,v1,v2"
+           exempt-pr-labels: "waiting-for-triage,bug,enhancement,feature request,pending,work_in_progress,v1,v2"
+           exempt-all-assignees: true
+           exempt-all-milestones: true
data/CHANGELOG.md CHANGED
@@ -1,5 +1,42 @@
  # v1.16
 
+ ## Release v1.16.1 - 2023/04/17
+
+ ### Enhancement
+
+ * in_tcp: Add `message_length_limit` to drop large incoming data
+   https://github.com/fluent/fluentd/pull/4137
+
+ ### Bug Fix
+
+ * Fix NameError of `SecondaryFileOutput` when setting secondary other than
+   `out_secondary_file`
+   https://github.com/fluent/fluentd/pull/4124
+ * Server helper: Suppress error of `UDPServer` over `max_bytes` on Windows
+   https://github.com/fluent/fluentd/pull/4131
+ * Buffer: Fix that `compress` setting causes unexpected error when receiving
+   already compressed MessagePack
+   https://github.com/fluent/fluentd/pull/4147
+
+ ### Misc
+
+ * Update MAINTAINERS.md
+   https://github.com/fluent/fluentd/pull/4119
+ * Update security policy
+   https://github.com/fluent/fluentd/pull/4123
+ * Plugin template: Remove unnecessary code
+   https://github.com/fluent/fluentd/pull/4128
+ * Revive issue auto closer
+   https://github.com/fluent/fluentd/pull/4116
+ * Fix a link for the repository of td-agent
+   https://github.com/fluent/fluentd/pull/4145
+ * in_udp: add test of message_length_limit
+   https://github.com/fluent/fluentd/pull/4117
+ * Fix a typo of an argument of `Fluent::EventStream#each`
+   https://github.com/fluent/fluentd/pull/4148
+ * Test in_tcp: Fix undesirable way to assert logs
+   https://github.com/fluent/fluentd/pull/4138
+
  ## Release v1.16.0 - 2023/03/29
 
  ### Enhancement
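For readers unfamiliar with the new `in_tcp` option listed above: `message_length_limit` is set directly inside a TCP `<source>` block. A minimal, hypothetical configuration sketch follows; the port, bind address, tag, parser, and the 32k limit are illustrative choices, not values taken from this release:

```
<source>
  @type tcp
  # port, bind, and tag below are illustrative placeholders
  port 5170
  bind 0.0.0.0
  tag tcp.events
  # Drop any single delimited message whose payload exceeds 32 KiB;
  # shorter messages are parsed and emitted as usual.
  message_length_limit 32k
  <parse>
    @type none
  </parse>
</source>
```

Per the change in this release, oversized messages are dropped and reported at info level rather than failing the connection; when no limit is set, the previous behavior is kept.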
data/CONTRIBUTING.md CHANGED
@@ -27,7 +27,7 @@ submitting an issue to Fluentd. Even better you can submit a Pull Request with a
  * **Documentation**: Use [fluentd documentation](https://github.com/fluent/fluentd-docs-gitbook) repository.
 
  If you find a bug of 3rd party plugins, please submit an issue to each plugin repository.
- And use [omnibus-td-agent](https://github.com/treasure-data/omnibus-td-agent) repository for td-agent related issues.
+ And use [fluent-package-builder](https://github.com/fluent/fluent-package-builder) repository for td-agent related issues.
 
  Note: Before report the issue, check latest version first. Sometimes users report fixed bug with older version.
 
data/MAINTAINERS.md CHANGED
@@ -3,11 +3,11 @@
  - [Naotoshi Seo](https://github.com/sonots), [ZOZO Technologies](https://tech.zozo.com/en/)
  - [Okkez](https://github.com/okkez)
  - [Hiroshi Hatake](https://github.com/cosmo0920), [Calyptia](https://calyptia.com/)
- - [Masahiro Nakagawa](https://github.com/repeatedly), [Treasure Data](https://www.treasuredata.com/)
- - [Satoshi Tagomori](https://github.com/tagomoris), [Treasure Data](https://www.treasuredata.com/)
+ - [Masahiro Nakagawa](https://github.com/repeatedly)
+ - [Satoshi Tagomori](https://github.com/tagomoris)
  - [Toru Takahashi](https://github.com/toru-takahashi), [Treasure Data](https://www.treasuredata.com/)
  - [Eduardo Silva](https://github.com/edsiper), [Calyptia](https://calyptia/)
- - [Fujimoto Seiji](https://github.com/fujimots), [ClearCode](https://www.clear-code.com/)
+ - [Fujimoto Seiji](https://github.com/fujimots)
  - [Takuro Ashie](https://github.com/ashie), [ClearCode](https://www.clear-code.com/)
  - [Kentaro Hayashi](https://github.com/kenhys), [ClearCode](https://www.clear-code.com/)
  - [Daijiro Fukuda](https://github.com/daipom), [ClearCode](https://www.clear-code.com/)
data/SECURITY.md CHANGED
@@ -4,15 +4,11 @@
 
  | Version | Supported |
  | ------- | ------------------ |
- | 1.14.x | :white_check_mark: |
- | <= 1.13.x | :x: |
+ | 1.16.x | :white_check_mark: |
+ | 1.15.x | :white_check_mark: |
+ | <= 1.14.x | :x: |
 
  ## Reporting a Vulnerability
 
- Please contact to current active maintainers. (in alphabetical order)
-
- * ashie@clear-code.com
- * fujimoto@clear-code.com
- * hatake@calyptia.com
- * hayashi@clear-code.com
-
+ Please post your vulnerability report from the following page:
+ https://github.com/fluent/fluentd/security/advisories
data/lib/fluent/event.rb CHANGED
@@ -49,7 +49,7 @@ module Fluent
  raise NotImplementedError, "DO NOT USE THIS CLASS directly."
  end
 
- def each(unapcker: nil, &block)
+ def each(unpacker: nil, &block)
  raise NotImplementedError, "DO NOT USE THIS CLASS directly."
  end
 
@@ -294,7 +294,7 @@ module Fluent
  super
  end
 
- def to_compressed_msgpack_stream(time_int: false)
+ def to_compressed_msgpack_stream(time_int: false, packer: nil)
  # time_int is always ignored because @data is always packed binary in this class
  @compressed_data
  end
@@ -36,6 +36,10 @@ module Fluent::Plugin
  desc "The field name of the client's address."
  config_param :source_address_key, :string, default: nil
 
+ # Setting default to nil for backward compatibility
+ desc "The max bytes of message."
+ config_param :message_length_limit, :size, default: nil
+
  config_param :blocking_timeout, :time, default: 0.5
 
  desc 'The payload is read up to this character.'
@@ -102,6 +106,7 @@ module Fluent::Plugin
 
  log.info "listening tcp socket", bind: @bind, port: @port
  del_size = @delimiter.length
+ discard_till_next_delimiter = false
  if @_extract_enabled && @_extract_tag_key
  server_create(:in_tcp_server_single_emit, @port, bind: @bind, resolve_name: !!@source_hostname_key, send_keepalive_packet: @send_keepalive_packet) do |data, conn|
  unless check_client(conn)
@@ -116,6 +121,16 @@ module Fluent::Plugin
  msg = buf[pos...i]
  pos = i + del_size
 
+ if discard_till_next_delimiter
+ discard_till_next_delimiter = false
+ next
+ end
+
+ if !@message_length_limit.nil? && @message_length_limit < msg.bytesize
+ log.info "The received data is larger than 'message_length_limit', dropped:", limit: @message_length_limit, size: msg.bytesize, head: msg[...32]
+ next
+ end
+
  @parser.parse(msg) do |time, record|
  unless time && record
  log.warn "pattern not matched", message: msg
@@ -131,6 +146,15 @@ module Fluent::Plugin
  end
  end
  buf.slice!(0, pos) if pos > 0
+ # If the buffer size exceeds the limit here, it means that the next message will definitely exceed the limit.
+ # So we should clear the buffer here. Otherwise, it will keep storing useless data until the next delimiter comes.
+ if !@message_length_limit.nil? && @message_length_limit < buf.bytesize
+ log.info "The buffer size exceeds 'message_length_limit', cleared:", limit: @message_length_limit, size: buf.bytesize, head: buf[...32]
+ buf.clear
+ # We should discard the subsequent data until the next delimiter comes.
+ discard_till_next_delimiter = true
+ next
+ end
  end
  else
  server_create(:in_tcp_server_batch_emit, @port, bind: @bind, resolve_name: !!@source_hostname_key, send_keepalive_packet: @send_keepalive_packet) do |data, conn|
@@ -147,6 +171,16 @@ module Fluent::Plugin
  msg = buf[pos...i]
  pos = i + del_size
 
+ if discard_till_next_delimiter
+ discard_till_next_delimiter = false
+ next
+ end
+
+ if !@message_length_limit.nil? && @message_length_limit < msg.bytesize
+ log.info "The received data is larger than 'message_length_limit', dropped:", limit: @message_length_limit, size: msg.bytesize, head: msg[...32]
+ next
+ end
+
  @parser.parse(msg) do |time, record|
  unless time && record
  log.warn "pattern not matched", message: msg
@@ -161,6 +195,15 @@ module Fluent::Plugin
  end
  router.emit_stream(@tag, es)
  buf.slice!(0, pos) if pos > 0
+ # If the buffer size exceeds the limit here, it means that the next message will definitely exceed the limit.
+ # So we should clear the buffer here. Otherwise, it will keep storing useless data until the next delimiter comes.
+ if !@message_length_limit.nil? && @message_length_limit < buf.bytesize
+ log.info "The buffer size exceeds 'message_length_limit', cleared:", limit: @message_length_limit, size: buf.bytesize, head: buf[...32]
+ buf.clear
+ # We should discard the subsequent data until the next delimiter comes.
+ discard_till_next_delimiter = true
+ next
+ end
  end
  end
  end
@@ -426,7 +426,7 @@ module Fluent
  end
  @secondary.acts_as_secondary(self)
  @secondary.configure(secondary_conf)
- if (@secondary.class != SecondaryFileOutput) &&
+ if (@secondary.class.to_s != "Fluent::Plugin::SecondaryFileOutput") &&
  (self.class != @secondary.class) &&
  (@custom_format || @secondary.implement?(:custom_format))
  log.warn "Use different plugin for secondary. Check the plugin works with primary like secondary_file", primary: self.class.to_s, secondary: @secondary.class.to_s
@@ -545,6 +545,10 @@ module Fluent
  data = @sock.recv(@max_bytes, @flags)
  rescue Errno::EAGAIN, Errno::EWOULDBLOCK, Errno::EINTR, Errno::ECONNRESET, IOError, Errno::EBADF
  return
+ rescue Errno::EMSGSIZE
+ # Windows ONLY: This happens when the data size is larger than `@max_bytes`.
+ @log.info "A received data was ignored since it was too large."
+ return
  end
  @callback.call(data)
  rescue => e
@@ -558,6 +562,10 @@ module Fluent
  data, addr = @sock.recvfrom(@max_bytes)
  rescue Errno::EAGAIN, Errno::EWOULDBLOCK, Errno::EINTR, Errno::ECONNRESET, IOError, Errno::EBADF
  return
+ rescue Errno::EMSGSIZE
+ # Windows ONLY: This happens when the data size is larger than `@max_bytes`.
+ @log.info "A received data was ignored since it was too large."
+ return
  end
  @callback.call(data, UDPCallbackSocket.new(@sock, addr, close_socket: @close_socket))
  rescue => e
@@ -16,6 +16,6 @@
 
  module Fluent
 
- VERSION = '1.16.0'
+ VERSION = '1.16.1'
 
  end
@@ -1,4 +1,3 @@
- $LOAD_PATH.unshift(File.expand_path("../../", __FILE__))
  require "test-unit"
  require "fluent/test"
  require "fluent/test/driver/<%= type %>"
@@ -218,16 +218,14 @@ class TcpInputTest < Test::Unit::TestCase
  </client>
  </security>
  !)
- d.run(shutdown: false, expect_records: 1, timeout: 2) do
+ d.run(expect_records: 1, timeout: 2) do
  create_tcp_socket('127.0.0.1', @port) do |sock|
  sock.send("hello\n", 0)
  end
  end
 
- assert_equal 1, d.instance.log.logs.count { |l| l =~ /anonymous client/ }
+ assert_equal 1, d.logs.count { |l| l =~ /anonymous client/ }
  assert_equal 0, d.events.size
- ensure
- d.instance_shutdown if d&.instance
  end
  end
 
@@ -255,4 +253,76 @@ class TcpInputTest < Test::Unit::TestCase
  assert_equal 'hello', event[2]['msg']
  end
  end
+
+ sub_test_case "message_length_limit" do
+ data("batch_emit", { extract: "" }, keep: true)
+ data("single_emit", { extract: "<extract>\ntag_key tag\n</extract>\n" }, keep: true)
+ test "drop records exceeding limit" do |data|
+ message_length_limit = 10
+ d = create_driver(base_config + %!
+ message_length_limit #{message_length_limit}
+ <parse>
+ @type none
+ </parse>
+ #{data[:extract]}
+ !)
+ d.run(expect_records: 2, timeout: 10) do
+ create_tcp_socket('127.0.0.1', @port) do |sock|
+ sock.send("a" * message_length_limit + "\n", 0)
+ sock.send("b" * (message_length_limit + 1) + "\n", 0)
+ sock.send("c" * (message_length_limit - 1) + "\n", 0)
+ end
+ end
+
+ expected_records = [
+ "a" * message_length_limit,
+ "c" * (message_length_limit - 1)
+ ]
+ actual_records = d.events.collect do |event|
+ event[2]["message"]
+ end
+
+ assert_equal expected_records, actual_records
+ end
+
+ test "clear buffer and discard the subsequent data until the next delimiter" do |data|
+ message_length_limit = 12
+ d = create_driver(base_config + %!
+ message_length_limit #{message_length_limit}
+ delimiter ";"
+ <parse>
+ @type json
+ </parse>
+ #{data[:extract]}
+ !)
+ d.run(expect_records: 1, timeout: 10) do
+ create_tcp_socket('127.0.0.1', @port) do |sock|
+ sock.send('{"message":', 0)
+ sock.send('"hello', 0)
+ sleep 1 # To make the server read data and clear the buffer here.
+ sock.send('world!"};', 0) # This subsequent data must be discarded so that a parsing failure doesn't occur.
+ sock.send('{"k":"v"};', 0) # This will succeed to parse.
+ end
+ end
+
+ logs = d.logs.collect do |log|
+ log.gsub(/\A\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4} /, "")
+ end
+ actual_records = d.events.collect do |event|
+ event[2]
+ end
+
+ assert_equal(
+ {
+ # Asserting that '[warn]: pattern not matched message="world!\"}"' warning does not occur.
+ logs: ['[info]: The buffer size exceeds \'message_length_limit\', cleared: limit=12 size=17 head="{\"message\":\"hello"' + "\n"],
+ records: [{"k" => "v"}],
+ },
+ {
+ logs: logs[1..],
+ records: actual_records,
+ }
+ )
+ end
+ end
  end
@@ -265,4 +265,32 @@ class UdpInputTest < Test::Unit::TestCase
  end
  end
  end
+
+ test 'message_length_limit' do
+ message_length_limit = 32
+ d = create_driver(base_config + %!
+ format none
+ message_length_limit #{message_length_limit}
+ !)
+ d.run(expect_records: 3) do
+ create_udp_socket('127.0.0.1', @port) do |u|
+ 3.times do |i|
+ u.send("#{i}" * 40 + "\n", 0)
+ end
+ end
+ end
+
+ if Fluent.windows?
+ expected_records = []
+ else
+ expected_records = 3.times.collect do |i|
+ "#{i}" * message_length_limit
+ end
+ end
+ actual_records = d.events.collect do |event|
+ event[2]["message"]
+ end
+
+ assert_equal expected_records, actual_records
+ end
  end
@@ -35,6 +35,16 @@ module FluentPluginOutputAsBufferedCompressTest
  @format ? @format.call(tag, time, record) : [tag, time, record].to_json
  end
  end
+
+ def self.dummy_event_stream
+ Fluent::ArrayEventStream.new(
+ [
+ [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
+ [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
+ [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
+ ]
+ )
+ end
  end
 
  class BufferedOutputCompressTest < Test::Unit::TestCase
@@ -60,16 +70,6 @@ class BufferedOutputCompressTest < Test::Unit::TestCase
  end
  end
 
- def dummy_event_stream
- Fluent::ArrayEventStream.new(
- [
- [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
- [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
- [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
- ]
- )
- end
-
  TMP_DIR = File.expand_path('../../tmp/test_output_as_buffered_compress', __FILE__)
 
  setup do
@@ -89,20 +89,34 @@ class BufferedOutputCompressTest < Test::Unit::TestCase
  end
 
  data(
- handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
- handle_stream_with_standard_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
- handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
- handle_stream_with_standard_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ :buffer_config,
+ [
+ config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ ],
  )
- test 'call a standard format when output plugin adds data to chunk' do |buffer_config|
+ data(
+ :input_es,
+ [
+ FluentPluginOutputAsBufferedCompressTest.dummy_event_stream,
+ # If already compressed data is incoming, it must be written as is (i.e. without decompressed).
+ # https://github.com/fluent/fluentd/issues/4146
+ Fluent::CompressedMessagePackEventStream.new(FluentPluginOutputAsBufferedCompressTest.dummy_event_stream.to_compressed_msgpack_stream),
+ ],
+ )
+ test 'call a standard format when output plugin adds data to chunk' do |data|
+ buffer_config = data[:buffer_config]
+ es = data[:input_es].dup # Note: the data matrix is shared in all patterns, so we need `dup` here.
+
  @i = create_output(:async)
  @i.configure(config_element('ROOT','', {}, [buffer_config]))
  @i.start
  @i.after_start
 
  io = StringIO.new
- es = dummy_event_stream
- expected = es.map { |e| e }
+ expected = es.dup.map { |t, r| [t, r] }
  compressed_data = ''
 
  assert_equal :gzip, @i.buffer.compress
@@ -138,7 +152,7 @@ class BufferedOutputCompressTest < Test::Unit::TestCase
  @i.after_start
 
  io = StringIO.new
- es = dummy_event_stream
+ es = FluentPluginOutputAsBufferedCompressTest.dummy_event_stream
  expected = es.map { |e| "#{e[1]}\n" }.join # e[1] is record
  compressed_data = ''
 
@@ -29,6 +29,7 @@ class ServerPluginHelperTest < Test::Unit::TestCase
  ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = @socket_manager_path.to_s
 
  @d = Dummy.new
+ @d.under_plugin_development = true
  @d.start
  @d.after_start
  end
@@ -794,6 +795,50 @@ class ServerPluginHelperTest < Test::Unit::TestCase
  end
  end
  end
+
+ sub_test_case 'over max_bytes' do
+ data("cut off on Non-Windows", { max_bytes: 32, records: ["a" * 40], expected: ["a" * 32] }, keep: true) unless Fluent.windows?
+ data("drop on Windows", { max_bytes: 32, records: ["a" * 40], expected: [] }, keep: true) if Fluent.windows?
+ test 'with sock' do |data|
+ max_bytes, records, expected = data.values
+
+ actual_records = []
+ @d.server_create_udp(:myserver, @port, max_bytes: max_bytes) do |data, sock|
+ actual_records << data
+ end
+
+ open_client(:udp, "127.0.0.1", @port) do |sock|
+ records.each do |record|
+ sock.send(record, 0)
+ end
+ end
+
+ waiting(10) { sleep 0.1 until actual_records.size >= expected.size }
+ sleep 1 if expected.size == 0 # To confirm no record recieved.
+
+ assert_equal expected, actual_records
+ end
+
+ test 'without sock' do |data|
+ max_bytes, records, expected = data.values
+
+ actual_records = []
+ @d.server_create_udp(:myserver, @port, max_bytes: max_bytes) do |data|
+ actual_records << data
+ end
+
+ open_client(:udp, "127.0.0.1", @port) do |sock|
+ records.each do |record|
+ sock.send(record, 0)
+ end
+ end
+
+ waiting(10) { sleep 0.1 until actual_records.size >= expected.size }
+ sleep 1 if expected.size == 0 # To confirm no record recieved.
+
+ assert_equal expected, actual_records
+ end
+ end
  end
 
  module CertUtil
@@ -1575,6 +1620,10 @@ class ServerPluginHelperTest < Test::Unit::TestCase
 
  def open_client(proto, addr, port)
  client = case proto
+ when :udp
+ c = UDPSocket.open
+ c.connect(addr, port)
+ c
  when :tcp
  TCPSocket.open(addr, port)
  when :tls
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluentd
  version: !ruby/object:Gem::Version
- version: 1.16.0
+ version: 1.16.1
  platform: x64-mingw-ucrt
  authors:
  - Sadayuki Furuhashi
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-03-29 00:00:00.000000000 Z
+ date: 2023-04-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -442,6 +442,7 @@ files:
  - ".github/PULL_REQUEST_TEMPLATE.md"
  - ".github/workflows/linux-test.yaml"
  - ".github/workflows/macos-test.yaml"
+ - ".github/workflows/stale-actions.yml"
  - ".github/workflows/windows-test.yaml"
  - ".gitignore"
  - ADOPTERS.md