fluent-plugin-cloudwatch-logs 0.0.4 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 4515d3ff1d51227199dd771c142b25f543c04ef0
4
- data.tar.gz: 784e81df02faae8bad44ddafca8397464c95b967
3
+ metadata.gz: c919aefdaa503c87f9e0e031c4bb26154da3f5ce
4
+ data.tar.gz: f6be0442947f82c255e17a2d7bb7a4e63e76ebc7
5
5
  SHA512:
6
- metadata.gz: 189047028dc9a27bba67e2aa6603064a0fd0be5f25aece8e347e8838a905460ca0ae4428f1c640c382cae42801d70defa7f54112e06f8757744c60a28bc8e0cd
7
- data.tar.gz: ea197c9342d43a29c218baeb772dd9da217726cf90f35eed7d8c03e37878be5c50905369cb24c4c5a59904d62614c0fd2629e0c06dd239645d70f5de09377671
6
+ metadata.gz: c77b226dcd3ed30b749edda0df7579cf1bacd68c51d53db5c6811cbd4193b459ffed3e716b992fd9e3808f5a154f99eddf7809927dee708ec81019013d928c18
7
+ data.tar.gz: 1ffe8e5f35cfac5e4ed7484ad07270933d259910a4714f5c52a9b436eb18b5f340edcab0b875ef2e33eb79d14cbb9d54ec75a732cd90ad3a212a27b306402e52
data/README.md CHANGED
@@ -66,10 +66,10 @@ Fetch sample log from CloudWatch Logs:
66
66
  type cloudwatch_logs
67
67
  log_group_name log-group-name
68
68
  log_stream_name log-stream-name
69
- sequence_token_file /var/lib/fluent/group_stream.out.seq
70
69
  auto_create_stream true
71
70
  #message_keys key1,key2,key3,...
72
71
  #max_message_length 32768
72
+ #use_tag_as_group false
73
73
  </match>
74
74
  ```
75
75
 
@@ -79,6 +79,7 @@ Fetch sample log from CloudWatch Logs:
79
79
  * `auto_create_stream`: to create log group and stream automatically
80
80
  * `message_keys`: keys to send messages as events
81
81
  * `max_message_length`: maximum length of the message
82
+ * `use_tag_as_group`: to use tag as a group name
82
83
 
83
84
  ### in_cloudwatch_logs
84
85
 
@@ -2,7 +2,7 @@ module Fluent
2
2
  module Plugin
3
3
  module Cloudwatch
4
4
  module Logs
5
- VERSION = "0.0.4"
5
+ VERSION = "0.0.5"
6
6
  end
7
7
  end
8
8
  end
@@ -5,12 +5,12 @@ module Fluent
5
5
  config_param :aws_key_id, :string, :default => nil
6
6
  config_param :aws_sec_key, :string, :default => nil
7
7
  config_param :region, :string, :default => nil
8
- config_param :log_group_name, :string
8
+ config_param :log_group_name, :string, :default => nil
9
9
  config_param :log_stream_name, :string
10
- config_param :sequence_token_file, :string
11
10
  config_param :auto_create_stream, :bool, default: false
12
11
  config_param :message_keys, :string, :default => nil
13
12
  config_param :max_message_length, :integer, :default => nil
13
+ config_param :use_tag_as_group, :bool, :default => false
14
14
 
15
15
  unless method_defined?(:log)
16
16
  define_method(:log) { $log }
@@ -29,8 +29,7 @@ module Fluent
29
29
  options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
30
30
  options[:region] = @region if @region
31
31
  @logs = Aws::CloudWatchLogs::Client.new(options)
32
-
33
- create_stream if @auto_create_stream
32
+ @sequence_tokens = {}
34
33
  end
35
34
 
36
35
  def format(tag, time, record)
@@ -39,62 +38,110 @@ module Fluent
39
38
 
40
39
  def write(chunk)
41
40
  events = []
42
- chunk.msgpack_each do |tag, time, record|
43
- time_ms = time * 1000
44
-
45
- if @message_keys
46
- message = @message_keys.split(',').map {|k| record[k].to_s }.join(' ')
47
- else
48
- message = record.to_json
41
+ chunk.enum_for(:msgpack_each).chunk {|tag, time, record|
42
+ tag
43
+ }.each {|tag, rs|
44
+ group_name = @use_tag_as_group ? tag : @log_group_name
45
+
46
+ unless log_group_exists?(group_name)
47
+ if @auto_create_stream
48
+ create_log_group(group_name)
49
+ else
50
+ log.warn "Log group '#{group_name}' does not exist"
51
+ next
52
+ end
49
53
  end
50
54
 
51
- if @max_message_length
52
- message.force_encoding('ASCII-8BIT')
53
- message = message.slice(0, @max_message_length)
55
+ unless log_stream_exists?(group_name, @log_stream_name)
56
+ if @auto_create_stream
57
+ create_log_stream(group_name, @log_stream_name)
58
+ else
59
+ log.warn "Log stream '#{@log_stream_name}' does not exist"
60
+ next
61
+ end
54
62
  end
55
63
 
56
- events << {timestamp: time_ms, message: message}
57
- end
58
- put_events(events)
64
+ rs.each do |t, time, record|
65
+ time_ms = time * 1000
66
+
67
+ if @message_keys
68
+ message = @message_keys.split(',').map {|k| record[k].to_s }.join(' ')
69
+ else
70
+ message = record.to_json
71
+ end
72
+
73
+ if @max_message_length
74
+ message.force_encoding('ASCII-8BIT')
75
+ message = message.slice(0, @max_message_length)
76
+ end
77
+
78
+ events << {timestamp: time_ms, message: message}
79
+ end
80
+ put_events(group_name, events)
81
+ }
59
82
  end
60
83
 
61
84
  private
62
- def next_sequence_token
63
- return nil unless File.exist?(@sequence_token_file)
64
- open(@sequence_token_file) {|f| f.read }.chomp
85
+ def next_sequence_token(group_name, stream_name)
86
+ @sequence_tokens[group_name][stream_name]
65
87
  end
66
88
 
67
- def store_next_sequence_token(token)
68
- open(@sequence_token_file, 'w') do |f|
69
- f.write token
70
- end
89
+ def store_next_sequence_token(group_name, stream_name, token)
90
+ @sequence_tokens[group_name][stream_name] = token
71
91
  end
72
92
 
73
- def put_events(events)
93
+ def put_events(group_name, events)
74
94
  args = {
75
95
  log_events: events,
76
- log_group_name: @log_group_name,
96
+ log_group_name: group_name,
77
97
  log_stream_name: @log_stream_name,
78
98
  }
79
- args[:sequence_token] = next_sequence_token if next_sequence_token
99
+ token = next_sequence_token(group_name, @log_stream_name)
100
+ args[:sequence_token] = token if token
80
101
 
81
102
  response = @logs.put_log_events(args)
82
- store_next_sequence_token(response.next_sequence_token)
103
+ store_next_sequence_token(group_name, @log_stream_name, response.next_sequence_token)
83
104
  end
84
105
 
85
- def create_stream
86
- log.debug "Creating log stream '#{@log_stream_name}' in log group '#{@log_group_name}'"
87
-
106
+ def create_log_group(group_name)
88
107
  begin
89
- @logs.create_log_group(log_group_name: @log_group_name)
108
+ @logs.create_log_group(log_group_name: group_name)
109
+ @sequence_tokens[group_name] = {}
90
110
  rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
91
- log.debug "Log group '#{@log_group_name}' already exists"
111
+ log.debug "Log group '#{group_name}' already exists"
92
112
  end
113
+ end
93
114
 
115
+ def create_log_stream(group_name, stream_name)
94
116
  begin
95
- @logs.create_log_stream(log_group_name: @log_group_name, log_stream_name: @log_stream_name)
117
+ @logs.create_log_stream(log_group_name: group_name, log_stream_name: stream_name)
118
+ @sequence_tokens[group_name][stream_name] = nil
96
119
  rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
97
- log.debug "Log stream '#{@log_stream_name}' already exists"
120
+ log.debug "Log stream '#{stream_name}' already exists"
121
+ end
122
+ end
123
+
124
+ def log_group_exists?(group_name)
125
+ if @sequence_tokens[group_name]
126
+ true
127
+ elsif @logs.describe_log_groups.log_groups.any? {|i| i.log_group_name == group_name }
128
+ @sequence_tokens[group_name] = {}
129
+ true
130
+ else
131
+ false
132
+ end
133
+ end
134
+
135
+ def log_stream_exists?(group_name, stream_name)
136
+ if not @sequence_tokens[group_name]
137
+ false
138
+ elsif @sequence_tokens[group_name].has_key?(stream_name)
139
+ true
140
+ elsif (log_stream = @logs.describe_log_streams(log_group_name: group_name).log_streams.find {|i| i.log_stream_name == stream_name })
141
+ @sequence_tokens[group_name][stream_name] = log_stream.upload_sequence_token
142
+ true
143
+ else
144
+ false
98
145
  end
99
146
  end
100
147
  end
@@ -24,7 +24,6 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
24
24
  region us-east-1
25
25
  log_group_name test_group
26
26
  log_stream_name test_stream
27
- sequence_token_file /tmp/sq
28
27
  auto_create_stream false
29
28
  EOC
30
29
 
@@ -33,7 +32,6 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
33
32
  assert_equal('us-east-1', d.instance.region)
34
33
  assert_equal('test_group', d.instance.log_group_name)
35
34
  assert_equal('test_stream', d.instance.log_stream_name)
36
- assert_equal('/tmp/sq', d.instance.sequence_token_file)
37
35
  assert_equal(false, d.instance.auto_create_stream)
38
36
  end
39
37
 
@@ -103,6 +101,30 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
103
101
  assert_equal('message2 l', events[1].message)
104
102
  end
105
103
 
104
+ def test_write_use_tag_as_group
105
+ new_log_stream
106
+
107
+ d = create_driver(<<-EOC)
108
+ #{default_config}
109
+ message_keys message,cloudwatch
110
+ use_tag_as_group true
111
+ EOC
112
+
113
+ time = Time.now
114
+ d.emit({'cloudwatch' => 'logs1', 'message' => 'message1'}, time.to_i)
115
+ d.emit({'cloudwatch' => 'logs2', 'message' => 'message2'}, time.to_i + 1)
116
+ d.run
117
+
118
+ sleep 20
119
+
120
+ events = get_log_events(fluentd_tag)
121
+ assert_equal(2, events.size)
122
+ assert_equal(time.to_i * 1000, events[0].timestamp)
123
+ assert_equal('message1 logs1', events[0].message)
124
+ assert_equal((time.to_i + 1) * 1000, events[1].timestamp)
125
+ assert_equal('message2 logs2', events[1].message)
126
+ end
127
+
106
128
  private
107
129
  def default_config
108
130
  <<-EOC
@@ -123,6 +145,6 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
123
145
 
124
146
 
125
147
  def create_driver(conf = default_config)
126
- Fluent::Test::BufferedOutputTestDriver.new(Fluent::CloudwatchLogsOutput).configure(conf)
148
+ Fluent::Test::BufferedOutputTestDriver.new(Fluent::CloudwatchLogsOutput, fluentd_tag).configure(conf)
127
149
  end
128
150
  end
data/test/test_helper.rb CHANGED
@@ -40,9 +40,17 @@ module CloudwatchLogsTestHelper
40
40
  end
41
41
 
42
42
  def clear_log_group
43
- logs.delete_log_group(log_group_name: log_group_name)
44
- rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
45
- # pass
43
+ [log_group_name, fluentd_tag].each do |name|
44
+ begin
45
+ logs.delete_log_group(log_group_name: name)
46
+ rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
47
+ # pass
48
+ end
49
+ end
50
+ end
51
+
52
+ def fluentd_tag
53
+ @fluentd_tag ||= "fluent.plugin.cloudwatch.test.#{Time.now.to_f}"
46
54
  end
47
55
 
48
56
  def create_log_stream
@@ -59,8 +67,8 @@ module CloudwatchLogsTestHelper
59
67
  end
60
68
  end
61
69
 
62
- def get_log_events
63
- logs.get_log_events(log_group_name: log_group_name, log_stream_name: log_stream_name).events
70
+ def get_log_events(tag = nil)
71
+ logs.get_log_events(log_group_name: tag || log_group_name, log_stream_name: log_stream_name).events
64
72
  end
65
73
 
66
74
  def put_log_events(events)
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-cloudwatch-logs
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.4
4
+ version: 0.0.5
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ryota Arai
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2014-08-19 00:00:00.000000000 Z
11
+ date: 2014-08-20 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: fluentd