fluent-plugin-buffer-event_limited 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: da62097bd8e3f3ef5005a70271cf63a51eb7ffd8
-  data.tar.gz: 1ebc0568345ce566851ace6206e49b303b0a4b98
+  metadata.gz: de3aaaafca6b7e849146aa11137d8925c79621ad
+  data.tar.gz: 3089551096e1a8b3f203492d37e275c6167d52bb
 SHA512:
-  metadata.gz: f5498467b6ee27c0589e40af603f2e6079d1bf063f6dee59864598b6ec46a09d19955b2667d8fc96a6b403e85ef46c318778e57db4aafb1e214e08897c9b3da6
-  data.tar.gz: 548e98a7238a41b3d752bb9985a0ca814689fd35d6c3e0846ab29e4afb87bfedd285420846217c6f2c0609cc58c7555cfc16bd6cbc7edfed42c5e45d73408b5d
+  metadata.gz: e9db61d4d7cf99dfce50fd88edf289e8ce5d617c744099e3ba72c8e91a47b679869726c087205bb695836b1651b6546b2439116df02872224e42160f61b1f5a1
+  data.tar.gz: db5259ef0fedd5d6a1d77fc4d2f35364c210ce398443d441098b54baa14f805bedef1148ded0c94e6ce655ec6b68f2370c65d49b78c97dc46606789ccd428a4b
data/README.md CHANGED
@@ -33,6 +33,9 @@ Options of `buffer_type file` are also available:
   buffer_type event_limited
   buffer_chunk_limit 10M
   buffer_chunk_records_limit 100
+  buffer_chunk_message_separator newline
+  # buffer_chunk_message_separator tab
+  # buffer_chunk_message_separator msgpack
   # other options...
 </match>
 ```
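
The three `buffer_chunk_message_separator` values shown above (`newline`, `tab`, `msgpack`) are the only ones the chunk accepts; anything else is rejected when the chunk initializes its record counter. A minimal sketch of that check (the constant and helper name are hypothetical; the error message mirrors `init_counter` in the source diff further down):

```ruby
# Hypothetical helper, not part of the plugin: the only accepted values for
# buffer_chunk_message_separator are the three documented in the README above.
VALID_SEPARATORS = %w[newline tab msgpack].freeze

def validate_separator(value)
  unless VALID_SEPARATORS.include?(value)
    raise ArgumentError, "Separator #{value.inspect} is not supported"
  end
  value
end

validate_separator('newline')   # => "newline"
# validate_separator('comma')   # => raises ArgumentError
```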
data/fluent-plugin-buffer-event_limited.gemspec CHANGED
@@ -2,7 +2,7 @@
 
 Gem::Specification.new do |spec|
   spec.name = "fluent-plugin-buffer-event_limited"
-  spec.version = "0.1.2"
+  spec.version = "0.1.3"
   spec.authors = ["TAGOMORI Satoshi", 'Gergo Sulymosi']
   spec.email = ["tagomoris@gmail.com", 'gergo.sulymosi@gmail.com']
   spec.description = %q{Fluentd memory buffer plugin with many types of chunk limits}
data/lib/fluent/plugin/buf_event_limited.rb CHANGED
@@ -4,9 +4,9 @@ module Fluent
   class EventLimitedBufferChunk < FileBufferChunk
     attr_reader :record_counter
 
-    def initialize(key, path, unique_id, mode = "a+", symlink_path = nil)
-      super
-      @record_counter = File.foreach(path).inject(0) { |c, _| c + 1 }
+    def initialize(key, path, unique_id, separator, mode = "a+", symlink_path = nil)
+      super(key, path, unique_id, mode = "a+", symlink_path = nil)
+      init_counter(path, separator)
     end
 
     def <<(data)
@@ -15,19 +15,36 @@ module Fluent
 
       return result
     end
+
+    private
+
+    def init_counter(path, separator)
+      @record_counter = \
+        case separator
+        when 'msgpack'
+          MessagePack::Unpacker.new(File.open(path)).each.inject(0) { |c, _| c + 1 }
+        when 'newline'
+          File.foreach(path, $/).inject(0) { |c, _| c + 1 }
+        when 'tab'
+          File.foreach(path, "\t").inject(0) { |c, _| c + 1 }
+        else
+          raise ArgumentError, "Separator #{separator.inspect} is not supported"
+        end
+    end
   end
 
   class EventLimitedFileBuffer < FileBuffer
     Fluent::Plugin.register_buffer('event_limited', self)
 
     config_param :buffer_chunk_records_limit, :integer, :default => Float::INFINITY
+    config_param :buffer_chunk_message_separator, :string, :default => 'msgpack'
 
     def new_chunk(key)
       encoded_key = encode_key(key)
       path, tsuffix = make_path(encoded_key, 'b')
       unique_id = tsuffix_to_unique_id(tsuffix)
 
-      EventLimitedBufferChunk.new(key, path, unique_id, 'a+', @symlink_path)
+      EventLimitedBufferChunk.new(key, path, unique_id, @buffer_chunk_message_separator, 'a+', @symlink_path)
     end
 
     # Copied here from
@@ -46,27 +63,23 @@ module Fluent
           unique_id = tsuffix_to_unique_id(tsuffix)
 
           if bq == 'b'
-            chunk = EventLimitedBufferChunk.new(key, path, unique_id, "a+")
+            chunk = EventLimitedBufferChunk.new(key, path, unique_id, @buffer_chunk_message_separator, "a+")
             maps << [timestamp, chunk]
           elsif bq == 'q'
-            chunk = EventLimitedBufferChunk.new(key, path, unique_id, "r")
+            chunk = EventLimitedBufferChunk.new(key, path, unique_id, @buffer_chunk_message_separator, "r")
             queues << [timestamp, chunk]
           end
         end
       }
 
       map = {}
-      maps.sort_by {|(timestamp,chunk)|
-        timestamp
-      }.each {|(timestamp,chunk)|
-        map[chunk.key] = chunk
-      }
+      maps
+        .sort_by { |(timestamp, chunk)| timestamp }
+        .each { |(timestamp, chunk)| map[chunk.key] = chunk }
 
-      queue = queues.sort_by {|(timestamp,chunk)|
-        timestamp
-      }.map {|(timestamp,chunk)|
-        chunk
-      }
+      queue = queues
+        .sort_by { |(timestamp, _chunk)| timestamp }
+        .map { |(_timestamp, chunk)| chunk }
 
       return queue, map
     end
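
To illustrate the behaviour added above: a minimal, standalone sketch (assuming the `msgpack` gem and Ruby's `Tempfile`; the `count_records` helper and the sample data are made up for illustration) of how each separator value turns an existing chunk file into a record count, mirroring `init_counter`:

```ruby
# Standalone sketch of the counting strategy used by init_counter above.
# Assumes the msgpack gem; count_records and the sample data are illustrative only.
require 'msgpack'
require 'tempfile'

def count_records(path, separator)
  case separator
  when 'msgpack'
    # Stream the packed objects back out of the file and count them.
    MessagePack::Unpacker.new(File.open(path)).each.inject(0) { |c, _| c + 1 }
  when 'newline'
    File.foreach(path, $/).inject(0) { |c, _| c + 1 }
  when 'tab'
    File.foreach(path, "\t").inject(0) { |c, _| c + 1 }
  else
    raise ArgumentError, "Separator #{separator.inspect} is not supported"
  end
end

Tempfile.create('chunk') do |f|
  f.write("data1\ndata2\n")
  f.flush
  puts count_records(f.path, 'newline')   # => 2
end

Tempfile.create('chunk') do |f|
  f.write(MessagePack.pack('data1') + MessagePack.pack('data2'))
  f.flush
  puts count_records(f.path, 'msgpack')   # => 2
end
```

Either way a resumed chunk can rebuild `record_counter` from the bytes already on disk, which is what the resume tests below exercise for both plain-text and msgpack chunks.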
data/test/plugin/test_buf_event_limited.rb CHANGED
@@ -2,6 +2,7 @@ require_relative '../test_helper'
 require 'fluent/plugin/buf_event_limited'
 require_relative 'test_event_recorder_buffered_output'
 require_relative 'dummy_chain'
+require 'msgpack'
 
 class Hash
   def corresponding_proxies
@@ -19,7 +20,7 @@ class EventLimitedFileBufferTest < Test::Unit::TestCase
   end
 
   def teardown
-    FileUtils.remove_entry_secure @buffer_path
+    FileUtils.rmdir @buffer_path
   end
 
   def default_config
@@ -28,6 +29,7 @@ class EventLimitedFileBufferTest < Test::Unit::TestCase
       flush_interval 0.1
       try_flush_interval 0.03
       buffer_chunk_records_limit 10
+      buffer_chunk_message_separator newline
       buffer_path #{@buffer_path}
     ]
   end
@@ -38,6 +40,20 @@ class EventLimitedFileBufferTest < Test::Unit::TestCase
       .configure(conf)
   end
 
+  def create_buffer_with_attributes(config = {})
+    config = {
+      'buffer_path' => @buffer_path,
+      'buffer_chunk_message_separator' => 'newline'
+    }.merge(config)
+    buf = Fluent::EventLimitedFileBuffer.new
+    Fluent::EventLimitedFileBuffer.send(:class_variable_set, :'@@buffer_paths', {})
+    buf.configure(config)
+    prefix = buf.instance_eval{ @buffer_path_prefix }
+    suffix = buf.instance_eval{ @buffer_path_suffix }
+
+    [buf, prefix, suffix]
+  end
+
   def test_plugin_configuration
     output = create_driver.instance
     buffer = output.instance_variable_get(:@buffer)
@@ -46,6 +62,7 @@ class EventLimitedFileBufferTest < Test::Unit::TestCase
     assert_equal 0.1, output.flush_interval
     assert_equal 0.03, output.try_flush_interval
     assert_equal 10, buffer.buffer_chunk_records_limit
+    assert_equal 'newline', buffer.buffer_chunk_message_separator
   end
 
   def test_emit
@@ -86,57 +103,113 @@ class EventLimitedFileBufferTest < Test::Unit::TestCase
     assert_equal 2, buffer.instance_variable_get(:@map)[''].record_counter
   end
 
-  def test_resume
-    buf1 = Fluent::EventLimitedFileBuffer.new
-    buf1.configure({'buffer_path' => @buffer_path})
-    prefix = buf1.instance_eval{ @buffer_path_prefix }
-    suffix = buf1.instance_eval{ @buffer_path_suffix }
-
+  def test_resume_from_plain_text_chunk
+    # Setup buffer to test chunks
+    buf1, prefix, suffix = create_buffer_with_attributes
     buf1.start
 
+    # Create chunks to test
     chunk1 = buf1.new_chunk('key1')
+    chunk2 = buf1.new_chunk('key2')
     assert_equal 0, chunk1.record_counter
-    chunk1 << "data1\ndata2\n"
+    assert_equal 0, chunk2.record_counter
 
-    chunk2 = buf1.new_chunk('key2')
+    # Write data into chunks
+    chunk1 << "data1\ndata2\n"
     chunk2 << "data3\ndata4\n"
 
-    assert chunk1
-    assert chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "path from new_chunk must be a 'b' buffer chunk"
-
+    # Enqueue chunk1 and leave chunk2 open
     buf1.enqueue(chunk1)
+    assert \
+      chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.q[0-9a-f]+#{suffix}\Z/,
+      "chunk1 must be enqueued"
+    assert \
+      chunk2.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/,
+      "chunk2 is not enqueued yet"
+    buf1.shutdown
 
-    assert chunk1
-    assert chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.q[0-9a-f]+#{suffix}\Z/, "chunk1 must be enqueued"
-    assert chunk2
-    assert chunk2.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "chunk2 is not enqueued yet"
+    # Setup a new buffer to test resume
+    buf2, *_ = create_buffer_with_attributes
+    queue, map = buf2.resume
 
-    buf1.shutdown
+    # Returns with the open and the closed buffers
+    assert_equal 1, queue.size # closed buffer
+    assert_equal 1, map.values.size # open buffer
 
-    buf2 = Fluent::EventLimitedFileBuffer.new
-    Fluent::EventLimitedFileBuffer.send(:class_variable_set, :'@@buffer_paths', {})
-    buf2.configure({'buffer_path' => @buffer_path})
-    prefix = buf2.instance_eval{ @buffer_path_prefix }
-    suffix = buf2.instance_eval{ @buffer_path_suffix }
+    # The paths of the resumed chunks are the same but they themselfs are not
+    resumed_chunk1 = queue.first
+    resumed_chunk2 = map.values.first
+    assert_equal chunk1.path, resumed_chunk1.path
+    assert_equal chunk2.path, resumed_chunk2.path
+    assert chunk1 != resumed_chunk1
+    assert chunk2 != resumed_chunk2
 
-    # buf1.start -> resume is normal operation, but now, we cannot it.
+    # Resume with the proper type of buffer chunk
+    assert_equal Fluent::EventLimitedBufferChunk, resumed_chunk1.class
+    assert_equal Fluent::EventLimitedBufferChunk, resumed_chunk2.class
+
+    assert_equal "data1\ndata2\n", resumed_chunk1.read
+    assert_equal "data3\ndata4\n", resumed_chunk2.read
+
+    assert_equal 2, resumed_chunk1.record_counter
+    assert_equal 2, resumed_chunk2.record_counter
+  end
+
+  def test_resume_from_msgpack_chunks
+    # Setup buffer to test chunks
+    buf1, prefix, suffix = create_buffer_with_attributes({'buffer_chunk_message_separator' => 'msgpack'})
+    buf1.start
+
+    # Create chunks to test
+    chunk1 = buf1.new_chunk('key1')
+    chunk2 = buf1.new_chunk('key2')
+    assert_equal 0, chunk1.record_counter
+    assert_equal 0, chunk2.record_counter
+
+    # Write data into chunks
+    chunk1 << MessagePack.pack('data1')
+    chunk1 << MessagePack.pack('data2')
+    chunk2 << MessagePack.pack('data3')
+    chunk2 << MessagePack.pack('data4')
+
+    # Enqueue chunk1 and leave chunk2 open
+    buf1.enqueue(chunk1)
+    assert \
+      chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.q[0-9a-f]+#{suffix}\Z/,
+      "chunk1 must be enqueued"
+    assert \
+      chunk2.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/,
+      "chunk2 is not enqueued yet"
+    buf1.shutdown
+
+    # Setup a new buffer to test resume
+    buf2, *_ = create_buffer_with_attributes({'buffer_chunk_message_separator' => 'msgpack'})
     queue, map = buf2.resume
 
-    assert_equal 1, queue.size
-    assert_equal 1, map.size
+    # Returns with the open and the closed buffers
+    assert_equal 1, queue.size # closed buffer
+    assert_equal 1, map.values.size # open buffer
 
+    # The paths of the resumed chunks are the same but they themselfs are not
     resumed_chunk1 = queue.first
+    resumed_chunk2 = map.values.first
     assert_equal chunk1.path, resumed_chunk1.path
-    resumed_chunk2 = map['key2']
     assert_equal chunk2.path, resumed_chunk2.path
+    assert chunk1 != resumed_chunk1
+    assert chunk2 != resumed_chunk2
 
+    # Resume with the proper type of buffer chunk
     assert_equal Fluent::EventLimitedBufferChunk, resumed_chunk1.class
     assert_equal Fluent::EventLimitedBufferChunk, resumed_chunk2.class
 
+    assert_equal \
+      MessagePack.pack('data1') + MessagePack.pack('data2'),
+      resumed_chunk1.read
+    assert_equal \
+      MessagePack.pack('data3') + MessagePack.pack('data4'),
+      resumed_chunk2.read
+
     assert_equal 2, resumed_chunk1.record_counter
     assert_equal 2, resumed_chunk2.record_counter
-
-    assert_equal "data1\ndata2\n", resumed_chunk1.read
-    assert_equal "data3\ndata4\n", resumed_chunk2.read
   end
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-buffer-event_limited
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.1.3
 platform: ruby
 authors:
 - TAGOMORI Satoshi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-11-09 00:00:00.000000000 Z
+date: 2015-11-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler