fluent-plugin-buffer-event_limited 0.1.0 → 0.1.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: bde16a6ec9ee256c8eb1d8eff67a03f15857a67a
4
- data.tar.gz: 93341c7a6f0b72d0003e907e31195a0e4703eddc
3
+ metadata.gz: 1ac06f3326a37f00dbce173463746fac8a77a693
4
+ data.tar.gz: 35b7b2257f3f435b25dd6cd743ad2634b98e5d86
5
5
  SHA512:
6
- metadata.gz: d83a6398da1b8e118cffbfb221802396d6da93b530176f526c59b64ce565d69c1975de6b52ba389d7d82707bdc060745c83a14a5f59a762d7907b7c486ad4dce
7
- data.tar.gz: 71ee96f41c83236bac796a0184d4dae7a6a9d532ffaf42599a4f2378091ffef9cbf25751033523b0e220c1a9acdb211622670222c555c359c5d0b501cbaea3ad
6
+ metadata.gz: 7eb922abcbee4971fdaf89999d508883d4120c4298f080a7c832c2e0905c1286c154f04d64486adcd83dc23ec42e8cada739d689e0f842a94a1d33affe6109a4
7
+ data.tar.gz: 73b1866f83553e3ca96cd7546c5b3c57d4dd4f5bf571eda5b82096f5b6d528f92825248852e0c5cc62cb6535152a6928d015ab40658e9c1edceac1c49df46ee3
data/README.md CHANGED
@@ -1,5 +1,7 @@
1
1
  # fluent-plugin-buffer-event_limited
2
2
 
3
+ This gem is a mutation of the [fluent-plugin-buffer-lightening](https://github.com/tagomoris/fluent-plugin-buffer-lightening) buffer plugin by [tagomoris](https://github.com/tagomoris).
4
+
3
5
  [Fluentd](http://fluentd.org) buffer plugin on memory to flush with many types of chunk limit methods:
4
6
  * events count limit in chunk
5
7
 
@@ -49,13 +51,3 @@ For more frequently flushing, use `flush_interval` and `try_flush_interval` with
49
51
  </match>
50
52
  ```
51
53
 
52
- ## TODO
53
-
54
- * more limit patterns
55
- * patches welcome!
56
-
57
- ## Copyright
58
-
59
- * Copyright (c) 2013- TAGOMORI Satoshi (tagomoris)
60
- * License
61
- * Apache License, Version 2.0
@@ -2,7 +2,7 @@
2
2
 
3
3
  Gem::Specification.new do |spec|
4
4
  spec.name = "fluent-plugin-buffer-event_limited"
5
- spec.version = "0.1.0"
5
+ spec.version = "0.1.1"
6
6
  spec.authors = ["TAGOMORI Satoshi", 'Gergo Sulymosi']
7
7
  spec.email = ["tagomoris@gmail.com", 'gergo.sulymosi@gmail.com']
8
8
  spec.description = %q{Fluentd memory buffer plugin with many types of chunk limits}
@@ -6,12 +6,14 @@ module Fluent
6
6
 
7
7
  def initialize(key, path, unique_id, mode = "a+", symlink_path = nil)
8
8
  super
9
- @record_counter = 0
9
+ @record_counter = %x{wc -l < "#{path}"}.to_i
10
10
  end
11
11
 
12
12
  def <<(data)
13
- super
13
+ result = super
14
14
  @record_counter += 1
15
+
16
+ return result
15
17
  end
16
18
  end
17
19
 
@@ -28,6 +30,47 @@ module Fluent
28
30
  EventLimitedBufferChunk.new(key, path, unique_id, 'a+', @symlink_path)
29
31
  end
30
32
 
33
+ # Copied here from
34
+ # https://github.com/fluent/fluentd/blob/d3ae305b6e7521fafac6ad30c6b0a8763c363b65/lib/fluent/plugin/buf_file.rb#L128-L165
35
+ def resume
36
+ maps = []
37
+ queues = []
38
+
39
+ Dir.glob("#{@buffer_path_prefix}*#{@buffer_path_suffix}") {|path|
40
+ identifier_part = chunk_identifier_in_path(path)
41
+ if m = PATH_MATCH.match(identifier_part)
42
+ key = decode_key(m[1])
43
+ bq = m[2]
44
+ tsuffix = m[3]
45
+ timestamp = m[3].to_i(16)
46
+ unique_id = tsuffix_to_unique_id(tsuffix)
47
+
48
+ if bq == 'b'
49
+ chunk = EventLimitedBufferChunk.new(key, path, unique_id, "a+")
50
+ maps << [timestamp, chunk]
51
+ elsif bq == 'q'
52
+ chunk = EventLimitedBufferChunk.new(key, path, unique_id, "r")
53
+ queues << [timestamp, chunk]
54
+ end
55
+ end
56
+ }
57
+
58
+ map = {}
59
+ maps.sort_by {|(timestamp,chunk)|
60
+ timestamp
61
+ }.each {|(timestamp,chunk)|
62
+ map[chunk.key] = chunk
63
+ }
64
+
65
+ queue = queues.sort_by {|(timestamp,chunk)|
66
+ timestamp
67
+ }.map {|(timestamp,chunk)|
68
+ chunk
69
+ }
70
+
71
+ return queue, map
72
+ end
73
+
31
74
  def storable?(chunk, data)
32
75
  chunk.record_counter < @buffer_chunk_records_limit &&
33
76
  (chunk.size + data.bytesize) <= @buffer_chunk_limit
@@ -1,15 +1,25 @@
1
1
  require_relative '../test_helper'
2
+ require 'fluent/plugin/buf_event_limited'
2
3
  require_relative 'test_event_recorder_buffered_output'
3
4
  require_relative 'dummy_chain'
4
5
 
6
+ class Hash
7
+ def corresponding_proxies
8
+ @corresponding_proxies ||= []
9
+ end
10
+
11
+ def to_masked_element
12
+ self
13
+ end
14
+ end
15
+
5
16
  class EventLimitedFileBufferTest < Test::Unit::TestCase
6
17
  def setup
7
- @buffer = Tempfile.new('event-limited-file-buffer')
18
+ @buffer_path = Dir.mktmpdir('event-limited-file-buffer')
8
19
  end
9
20
 
10
21
  def teardown
11
- @buffer.close
12
- @buffer.unlink
22
+ FileUtils.remove_entry_secure @buffer_path
13
23
  end
14
24
 
15
25
  def default_config
@@ -18,7 +28,7 @@ class EventLimitedFileBufferTest < Test::Unit::TestCase
18
28
  flush_interval 0.1
19
29
  try_flush_interval 0.03
20
30
  buffer_chunk_records_limit 10
21
- buffer_path #{@buffer.path}
31
+ buffer_path #{@buffer_path}
22
32
  ]
23
33
  end
24
34
 
@@ -75,4 +85,58 @@ class EventLimitedFileBufferTest < Test::Unit::TestCase
75
85
  assert !buffer.emit(tag, d.instance.format(tag, time, {"a" => 12}), chain)
76
86
  assert_equal 2, buffer.instance_variable_get(:@map)[''].record_counter
77
87
  end
88
+
89
+ def test_resume
90
+ buf1 = Fluent::EventLimitedFileBuffer.new
91
+ buf1.configure({'buffer_path' => @buffer_path})
92
+ prefix = buf1.instance_eval{ @buffer_path_prefix }
93
+ suffix = buf1.instance_eval{ @buffer_path_suffix }
94
+
95
+ buf1.start
96
+
97
+ chunk1 = buf1.new_chunk('key1')
98
+ assert_equal 0, chunk1.record_counter
99
+ chunk1 << "data1\ndata2\n"
100
+
101
+ chunk2 = buf1.new_chunk('key2')
102
+ chunk2 << "data3\ndata4\n"
103
+
104
+ assert chunk1
105
+ assert chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "path from new_chunk must be a 'b' buffer chunk"
106
+
107
+ buf1.enqueue(chunk1)
108
+
109
+ assert chunk1
110
+ assert chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.q[0-9a-f]+#{suffix}\Z/, "chunk1 must be enqueued"
111
+ assert chunk2
112
+ assert chunk2.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "chunk2 is not enqueued yet"
113
+
114
+ buf1.shutdown
115
+
116
+ buf2 = Fluent::EventLimitedFileBuffer.new
117
+ Fluent::EventLimitedFileBuffer.send(:class_variable_set, :'@@buffer_paths', {})
118
+ buf2.configure({'buffer_path' => @buffer_path})
119
+ prefix = buf2.instance_eval{ @buffer_path_prefix }
120
+ suffix = buf2.instance_eval{ @buffer_path_suffix }
121
+
122
+ # buf1.start -> resume is the normal operation, but we cannot do that here.
123
+ queue, map = buf2.resume
124
+
125
+ assert_equal 1, queue.size
126
+ assert_equal 1, map.size
127
+
128
+ resumed_chunk1 = queue.first
129
+ assert_equal chunk1.path, resumed_chunk1.path
130
+ resumed_chunk2 = map['key2']
131
+ assert_equal chunk2.path, resumed_chunk2.path
132
+
133
+ assert_equal Fluent::EventLimitedBufferChunk, resumed_chunk1.class
134
+ assert_equal Fluent::EventLimitedBufferChunk, resumed_chunk2.class
135
+
136
+ assert_equal 2, resumed_chunk1.record_counter
137
+ assert_equal 2, resumed_chunk2.record_counter
138
+
139
+ assert_equal "data1\ndata2\n", resumed_chunk1.read
140
+ assert_equal "data3\ndata4\n", resumed_chunk2.read
141
+ end
78
142
  end
data/test/test_helper.rb CHANGED
@@ -10,5 +10,4 @@ unless ENV.has_key?('VERBOSE')
10
10
  end
11
11
 
12
12
  require 'fluent/test'
13
- require 'fluent/plugin/buf_event_limited'
14
13
  Fluent::Test.setup
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-buffer-event_limited
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.0
4
+ version: 0.1.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - TAGOMORI Satoshi
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2015-10-23 00:00:00.000000000 Z
12
+ date: 2015-11-09 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: bundler