fluentd 0.10.35 → 0.10.36

(new Travis CI configuration file) ADDED
@@ -0,0 +1,13 @@
+ language: ruby
+
+ rvm:
+   - 1.9.2
+   - 1.9.3
+   - 2.0.0
+   - rbx-19mode
+
+ branches:
+   only:
+     - master
+
+ script: bundle exec rake test

data/ChangeLog CHANGED
@@ -1,3 +1,12 @@
+ Release 0.10.36 - 2013/07/31
+
+   * Log: add class name on each error logs
+   * out_forward: change log level of node detachment/recovering/standby-failover from info to warn
+   * out_file: add symlink_path parameter to point to latest written file
+   * in_forward, in_http, in_unix: add backlog parameter to change resource backlog size
+   * add --dry-run option to check fluentd setup without actual run
+   * Fluentd now assumes configuration file written in UTF-8
+   * update cool.io version to 1.1.1
 
  Release 0.10.35 - 2013/06/12
 
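Two of the new entries are configuration-facing. As a rough illustration, a hypothetical out_file section using the new symlink_path parameter might look like the sketch below; the match pattern and paths are invented, and the parameter's behaviour is only what the ChangeLog states (a symlink kept pointing at the latest written file):

<match app.**>
  type file
  # time-sliced log files are written under this path
  path /var/log/fluent/app
  # new in 0.10.36: maintain a symlink to the latest written file
  symlink_path /var/log/fluent/app.log
</match>

The new --dry-run option is described as checking the fluentd setup without an actual run, presumably combined with the usual -c flag, e.g. fluentd --dry-run -c fluent.conf.
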
(gemspec) CHANGED
@@ -21,7 +21,7 @@ Gem::Specification.new do |gem|
   gem.add_runtime_dependency(%q<msgpack>, [">= 0.4.4", "!= 0.5.0", "!= 0.5.1", "!= 0.5.2", "!= 0.5.3", "< 0.6.0"])
   gem.add_runtime_dependency(%q<json>, [">= 1.4.3"])
   gem.add_runtime_dependency(%q<yajl-ruby>, ["~> 1.0"])
-  gem.add_runtime_dependency(%q<cool.io>, ["~> 1.1.0"])
+  gem.add_runtime_dependency(%q<cool.io>, ["~> 1.1.1"])
   gem.add_runtime_dependency(%q<http_parser.rb>, ["~> 0.5.1"])
 
   gem.add_development_dependency(%q<rake>, [">= 0.9.2"])
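
For context on the bumped constraint: the pessimistic operator ~> 1.1.1 allows any cool.io release at or above 1.1.1 but below 1.2.0, whereas the previous ~> 1.1.0 also admitted 1.1.0 itself.
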
(buffer implementation: Fluent::Buffer, BufferChunk, BasicBuffer) CHANGED
@@ -16,288 +16,284 @@
In this hunk the removed and re-added lines are textually identical in this extract apart from a handful of blank lines (288 old lines become 284 new ones), apparently a whitespace-only cleanup, so the resulting code is shown once:

# limitations under the License.
#
module Fluent
  class BufferError < StandardError
  end

  class BufferChunkLimitError < BufferError
  end

  class BufferQueueLimitError < BufferError
  end


  class Buffer
    include Configurable

    def initialize
      super
    end

    def configure(conf)
      super
    end

    def start
    end

    def shutdown
    end

    def before_shutdown(out)
    end

    #def emit(key, data, chain)
    #end

    #def keys
    #end

    #def push(key)
    #end

    #def pop(out)
    #end

    #def clear!
    #end
  end


  class BufferChunk
    include MonitorMixin

    def initialize(key)
      super()
      @key = key
    end

    attr_reader :key

    #def <<(data)
    #end

    #def size
    #end

    def empty?
      size == 0
    end

    #def close
    #end

    #def purge
    #end

    #def read
    #end

    #def open
    #end

    def write_to(io)
      open {|i|
        FileUtils.copy_stream(i, io)
      }
    end

    def msgpack_each(&block)
      open {|io|
        u = MessagePack::Unpacker.new(io)
        begin
          u.each(&block)
        rescue EOFError
        end
      }
    end
  end


  class BasicBuffer < Buffer
    include MonitorMixin

    def initialize
      super
      @parallel_pop = true
    end

    def enable_parallel(b=true)
      @parallel_pop = b
    end

    # This configuration assumes plugins to send records to a remote server.
    # Local file based plugins which should provide more reliability and efficiency
    # should override buffer_chunk_limit with a larger size.
    config_param :buffer_chunk_limit, :size, :default => 8*1024*1024
    config_param :buffer_queue_limit, :integer, :default => 256

    alias chunk_limit buffer_chunk_limit
    alias chunk_limit= buffer_chunk_limit=
    alias queue_limit buffer_queue_limit
    alias queue_limit= buffer_queue_limit=

    def configure(conf)
      super
    end

    def start
      @queue, @map = resume
      @queue.extend(MonitorMixin)
    end

    def shutdown
      synchronize do
        @queue.synchronize do
          until @queue.empty?
            @queue.shift.close
          end
        end
        @map.each_pair {|key,chunk|
          chunk.close
        }
      end
    end

    def emit(key, data, chain)
      key = key.to_s

      synchronize do
        top = (@map[key] ||= new_chunk(key)) # TODO generate unique chunk id

        if top.size + data.bytesize <= @buffer_chunk_limit
          chain.next
          top << data
          return false

          ## FIXME
          #elsif data.bytesize > @buffer_chunk_limit
          #  # TODO
          #  raise BufferChunkLimitError, "received data too large"

        elsif @queue.size >= @buffer_queue_limit
          raise BufferQueueLimitError, "queue size exceeds limit"
        end

        if data.bytesize > @buffer_chunk_limit
          $log.warn "Size of the emitted data exceeds buffer_chunk_limit."
          $log.warn "This may occur problems in the output plugins ``at this server.``"
          $log.warn "To avoid problems, set a smaller number to the buffer_chunk_limit"
          $log.warn "in the forward output ``at the log forwarding server.``"
        end

        nc = new_chunk(key) # TODO generate unique chunk id
        ok = false

        begin
          nc << data
          chain.next

          flush_trigger = false
          @queue.synchronize {
            enqueue(top)
            flush_trigger = @queue.empty?
            @queue << top
            @map[key] = nc
          }

          ok = true
          return flush_trigger
        ensure
          nc.purge unless ok
        end

      end # synchronize
    end

    def keys
      @map.keys
    end

    def queue_size
      @queue.size
    end

    def total_queued_chunk_size
      total = 0
      @map.each_value {|c|
        total += c.size
      }
      @queue.each {|c|
        total += c.size
      }
      total
    end

    #def new_chunk(key)
    #end

    #def resume
    #end

    #def enqueue(chunk)
    #end

    def push(key)
      synchronize do
        top = @map[key]
        if !top || top.empty?
          return false
        end

        @queue.synchronize do
          enqueue(top)
          @queue << top
          @map.delete(key)
        end

        return true
      end # synchronize
    end

    def pop(out)
      chunk = nil
      @queue.synchronize do
        if @parallel_pop
          chunk = @queue.find {|c| c.try_mon_enter }
          return false unless chunk
        else
          chunk = @queue.first
          return false unless chunk
          return false unless chunk.try_mon_enter
        end
      end

      begin
        if !chunk.empty?
          write_chunk(chunk, out)
        end

        @queue.delete_if {|c|
          c.object_id == chunk.object_id
        }

        chunk.purge

        return !@queue.empty?
      ensure
        chunk.mon_exit
      end
    end

    def write_chunk(chunk, out)
      out.write(chunk)
    end

    def clear!
      @queue.delete_if {|chunk|
        chunk.purge
        true
      }
    end
  end
end
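
The commented-out methods above are the contract a concrete buffer fills in: BasicBuffer expects new_chunk, resume and enqueue, and its chunks must provide <<, size, open, close and purge on top of BufferChunk. The sketch below is not fluentd's real memory or file buffer; it is a minimal, hypothetical in-memory implementation of that contract written against the classes shown above (the Sketch* names are invented for illustration, and it assumes fluent/buffer.rb and its Configurable/MonitorMixin environment are already loaded):

require 'stringio'

module Fluent
  # Hypothetical chunk: accumulates appended event data in a binary String.
  class SketchMemoryChunk < BufferChunk
    def initialize(key)
      super(key)
      @data = ''.force_encoding('ASCII-8BIT')
    end

    def <<(data)
      @data << data.dup.force_encoding('ASCII-8BIT')
    end

    def size
      @data.bytesize
    end

    # open must yield an IO-like object; BufferChunk#write_to and
    # BufferChunk#msgpack_each are built on top of it.
    def open(&block)
      StringIO.open(@data, &block)
    end

    def close
    end

    # purge discards the chunk's data (BasicBuffer#pop and #clear! call it).
    def purge
      @data = ''.force_encoding('ASCII-8BIT')
    end
  end

  # Hypothetical buffer wiring the three BasicBuffer hooks to the chunk above.
  class SketchMemoryBuffer < BasicBuffer
    # resume must return [queued_chunks, key_to_chunk_map]; nothing is
    # persisted here, so both start out empty.
    def resume
      return [], {}
    end

    def new_chunk(key)
      SketchMemoryChunk.new(key)
    end

    # enqueue is the "seal this chunk" hook; a file-backed buffer would
    # rename the chunk file here, while an in-memory chunk needs no work.
    def enqueue(chunk)
    end
  end
end

With those hooks supplied, the BasicBuffer logic above does the rest: emit appends to the per-key top chunk until buffer_chunk_limit would be exceeded, then enqueues the full chunk and swaps in a fresh one, returning true when the queue was empty beforehand so the caller knows to trigger a flush; push force-enqueues a key's current chunk, and pop writes a queued chunk through write_chunk and purges it.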