fluent-plugin-dag 1.0.2
- checksums.yaml +7 -0
- data/.gitignore +18 -0
- data/Gemfile +3 -0
- data/LICENSE.txt +179 -0
- data/README.rdoc +74 -0
- data/Rakefile +14 -0
- data/VERSION +1 -0
- data/fluent-plugin-dag.gemspec +24 -0
- data/lib/fluent/plugin/out_dag.rb +199 -0
- data/test/out_dag.rb +162 -0
- metadata +136 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: a416a162fbd06d64c033891231ee14e85d9e0c36
  data.tar.gz: c00cbf611b56d3399cc6264a88bd4eedd9d64d16
SHA512:
  metadata.gz: 8d13023e10951bb78f732dd660b68f2de689154842fe83c3cf2bd604a27a9bcd291b1945f70edd47a77c8b974ce9c7362d066cf174538781ee9606790c819252
  data.tar.gz: 2fd1fef84d02ea51bebf789ead95e7618611a2cdc3261e73c402a158ef51230bf4ea924d48b850190a7dcd67ee4042af2add01f5f38b5624b99b5cb8f923f0c9
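The SHA1 and SHA512 entries above are the digests RubyGems records for the metadata.gz and data.tar.gz members of the packaged .gem archive. As a minimal sketch (not part of the gem), they can be recomputed with Ruby's standard Digest library, assuming the two files have already been extracted from fluent-plugin-dag-1.0.2.gem (for example via `gem fetch fluent-plugin-dag -v 1.0.2` followed by `tar -xf fluent-plugin-dag-1.0.2.gem`):

  # verify_checksums.rb -- illustrative sketch, not shipped with the gem.
  # Assumes metadata.gz and data.tar.gz were extracted from the .gem
  # archive into the current directory beforehand.
  require 'digest'

  %w[metadata.gz data.tar.gz].each do |file|
    puts "#{file}:"
    puts "  SHA1:   #{Digest::SHA1.file(file).hexdigest}"
    puts "  SHA512: #{Digest::SHA512.file(file).hexdigest}"
  end
  # Compare the printed values against the checksums.yaml entries above.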
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,179 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   Twitter4J SUBCOMPONENTS:

   Twitter4J includes software from JSON.org to parse JSON response from the Twitter API. You can see the license term at http://www.JSON.org/license.html
data/README.rdoc
ADDED
@@ -0,0 +1,74 @@
= IIJ Dag output plugin for {Fluentd}[http://github.com/fluent/fluentd]

== Overview

This Fluentd output plugin for the IIJ GIO Storage & Analysis Service buffers event logs into local files, converts them into a structure that can be analyzed, and uploads them periodically.

The plugin also refers to the time of each event log to split files accurately.

Data is stored with the following layout:

  dag://${database(bucket)}/${table}/date=yyyymmdd/hour=%-H/dag_%-M_0.gz

The data format is newline (\n) delimited JSON strings, as follows:

  {"time":${unixtime}, "v":"${record}"}
  {"time":${unixtime}, "v":"${record}"}
  ...

== Requirements
- {ruby}[https://www.ruby-lang.org/]
- {bundler}[http://bundler.io]
- {fluentd}[http://fluentd.org/]

== Installation
- Install from Rubygems.org

  gem install fluent-plugin-dag

== Configuration
- Prepare a configuration file

  <match pattern>
    type dag

    dag_key_id YOUR_DAGRIN_KEY_ID
    dag_sec_key YOUR_DAGRIN_SECRET_KEY
    dag_endpoint storage-dag.iijgio.com
    dag_database sample_db
    dag_table sample_tbl
    buffer_path local_path
    flush_interval 5m
    buffer_chunk_limit 100m
  </match>

[dag_key_id (required)] Access key ID for the storage service

[dag_sec_key (required)] Secret key for the storage service

[dag_database (required)] Bucket name on the storage service

[dag_table (required)] Table name on the storage service

[dag_endpoint] Endpoint name of the storage service

[auto_create_bucket] Create the bucket on the storage service if it does not exist. Defaults to true.

[check_apikey_on_start] Check the storage service keys at startup. Defaults to true.

[proxy_uri] URI to use in a proxy environment

[buffer_path (required)] Path of the buffer file

[flush_interval] Interval for sending logs, in seconds. Defaults to 5 minutes; specify at least 1 minute. The unit can be given as (s|m|h).

[buffer_chunk_limit] Maximum buffer size. When this size is exceeded, the buffer is flushed forcibly. Defaults to 8MB; 100MB is recommended for the storage service. The suffixes "k" (KB), "m" (MB), and "g" (GB) can be used.

[utc] Use UTC.


== How to start
With fluentd.conf as the configuration file, run the command as follows:

  fluentd -c ./fluentd.conf -d fluentd.pid -l fluentd.log -vvv

== License
Apache License 2.0
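To make the record layout described in the README concrete, here is a minimal sketch (illustrative only, not part of the plugin) that serializes one event the same way out_dag.rb does, using yajl-ruby; the record contents are invented for the example:

  # format_sketch.rb -- illustrative; mirrors the documented
  # {"time":${unixtime}, "v":${record}} line format.
  require 'yajl'

  time   = Time.now.to_i                          # event time as unix time
  record = { "host" => "web01", "code" => 200 }   # hypothetical event record

  line = Yajl.dump({ :time => time, :v => record }) + "\n"
  print line   # e.g. {"time":1433376000,"v":{"host":"web01","code":200}}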
data/Rakefile
ADDED
@@ -0,0 +1,14 @@
require 'bundler'
Bundler::GemHelper.install_tasks

require 'rake/testtask'

Rake::TestTask.new(:test) do |test|
  test.libs << 'lib' << 'test'
  test.test_files = FileList['test/*.rb']
  test.verbose = true
end

task :default => [:build]
data/VERSION
ADDED
@@ -0,0 +1 @@
1.0.2
data/fluent-plugin-dag.gemspec
ADDED
@@ -0,0 +1,24 @@
# encoding: utf-8
$:.push File.expand_path('../lib', __FILE__)

Gem::Specification.new do |gem|
  gem.name = "fluent-plugin-dag"
  gem.description = "Dag output plugin for Fluentd event collector"
  gem.homepage = "http://www.iij.ad.jp/biz/storage/"
  gem.summary = gem.description
  gem.version = File.read("VERSION").strip
  gem.authors = ["iij"]
  gem.email = "dag-info@iij.ad.jp"
  gem.has_rdoc = false
  gem.files = `git ls-files`.split("\n").reject {|item| item =~ /^(fluent_sample.conf)/ }
  gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  gem.require_paths = ['lib']

  gem.add_dependency "fluentd", "~> 0.10.0"
  gem.add_dependency "aws-sdk-v1", "~> 1.59.0"
  gem.add_dependency "yajl-ruby", "~> 1.0"
  gem.add_dependency "fluent-mixin-config-placeholders", "~> 0.2.0"
  gem.add_development_dependency "rake", ">= 0.9.2"
  gem.add_development_dependency "flexmock", ">= 1.2.0"
end
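Because gem.version is read from the VERSION file and the file lists come from `git ls-files`, the gemspec only evaluates correctly from the repository root. A quick, illustrative way to sanity-check it (assuming a checked-out working copy; the script name is hypothetical):

  # inspect_gemspec.rb -- illustrative; run from the repository root so
  # File.read("VERSION") and the `git ls-files` calls resolve.
  spec = Gem::Specification.load('fluent-plugin-dag.gemspec')
  puts spec.version                            # => 1.0.2
  puts spec.runtime_dependencies.map(&:to_s)   # fluentd (~> 0.10.0), aws-sdk-v1 (~> 1.59.0), ...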
data/lib/fluent/plugin/out_dag.rb
ADDED
@@ -0,0 +1,199 @@
module Fluent

  require 'fluent/mixin/config_placeholders'

  class DagOutput < Fluent::TimeSlicedOutput
    Fluent::Plugin.register_output('dag', self)

    unless method_defined?(:log)
      define_method(:log) { $log }
    end

    def initialize
      super
      require 'aws-sdk-v1'
      require 'zlib'
      require 'time'
      require 'tempfile'
      require 'open3'

      @use_ssl = true
    end

    config_param :time_format, :string, :default => nil

    include SetTagKeyMixin
    config_set_default :include_tag_key, false

    config_param :dag_key_id, :string, :default => nil
    config_param :dag_sec_key, :string, :default => nil
    config_param :dag_database, :string
    config_param :dag_table, :string
    config_param :dag_endpoint, :string, :default => nil
    config_param :dag_force_path_style, :bool, :default => false
    config_param :auto_create_bucket, :bool, :default => true
    config_param :check_apikey_on_start, :bool, :default => true
    config_param :proxy_uri, :string, :default => nil

    config_set_default :buffer_type, 'file' # overwrite default buffer_type
    config_set_default :flush_interval, 300

    attr_reader :bucket

    include Fluent::Mixin::ConfigPlaceholders

    def placeholders
      [:percent]
    end

    def configure(conf)
      super

      unless conf['flush_interval'].nil?
        unless valid_flush_interval?(conf['flush_interval'])
          raise "flush_interval is invalid. Please check your configuration"
        end
      end

      unless valid_dag_database?(conf['dag_database'])
        raise "dag_database is invalid. Please check your configuration"
      end

      unless valid_dag_table?(conf['dag_table'])
        raise "dag_table is invalid. Please check your configuration"
      end

      @path = "#{@dag_table}/date=%Y%m%d/hour=%-H/dag_%-M"
      @dag_object_key_format = "%{path}_%{index}.%{file_extension}"

      if use_ssl = conf['use_ssl']
        if use_ssl.empty?
          @use_ssl = true
        else
          @use_ssl = Config.bool_value(use_ssl)
          if @use_ssl.nil?
            raise ConfigError, "'true' or 'false' is required for use_ssl option on dag output"
          end
        end
      end

      @ext, @mime_type = ['gz', 'application/x-gzip']
      @timef = TimeFormatter.new(@time_format, @localtime)

      if @localtime
        @path_slicer = Proc.new {|path|
          Time.now.strftime(path)
        }
      else
        @path_slicer = Proc.new {|path|
          Time.now.utc.strftime(path)
        }
      end

    end

    def start
      super
      options = {}
      if @dag_key_id && @dag_sec_key
        options[:access_key_id] = @dag_key_id
        options[:secret_access_key] = @dag_sec_key
      end
      options[:s3_endpoint] = @dag_endpoint
      options[:proxy_uri] = @proxy_uri if @proxy_uri
      options[:use_ssl] = @use_ssl
      options[:s3_force_path_style] = @dag_force_path_style

      @dag = AWS::S3.new(options)
      @bucket = @dag.buckets[@dag_database]

      ensure_bucket
      check_apikeys if @check_apikey_on_start
    end

    def format(tag, time, record)
      # copied from each mixin because current TimeSlicedOutput can't support mixins.
      if @include_tag_key
        record[@tag_key] = tag
      end

      dag_record = {}
      dag_record[:time] = time
      dag_record[:v] = record
      Yajl.dump(dag_record) + "\n"
    end

    def write(chunk)
      i = 0

      begin
        path = @path_slicer.call(@path)
        values_for_dag_object_key = {
          "path" => path,
          "time_slice" => chunk.key,
          "file_extension" => @ext,
          "index" => i
        }
        dag_path = @dag_object_key_format.gsub(%r(%{[^}]+})) { |expr|
          values_for_dag_object_key[expr[2...expr.size-1]]
        }
        i += 1
      end while @bucket.objects[dag_path].exists?

      tmp = Tempfile.new("dag-")
      begin
        w = Zlib::GzipWriter.new(tmp)
        chunk.write_to(w)
        w.close
        @bucket.objects[dag_path].write(Pathname.new(tmp.path), {:content_type => @mime_type})
      ensure
        tmp.close(true) rescue nil
        w.close rescue nil
        w.unlink rescue nil
      end
    end

    private

    def ensure_bucket
      if !@bucket.exists?
        if @auto_create_bucket
          log.info "Creating database #{@dag_database} on #{@dag_endpoint}"
          @dag.buckets.create(@dag_database)
        else
          raise "The specified bucket does not exist: bucket = #{@dag_database}"
        end
      end
    end

    def check_apikeys
      begin
        @bucket.empty?
      rescue AWS::S3::Errors::NoSuchBucket
      rescue AWS::S3::Errors::InvalidAccessKeyId
        raise "dag_key_id is invalid. Please check your configuration"
      rescue AWS::S3::Errors::SignatureDoesNotMatch
        raise "dag_sec_key is invalid. Please check your configuration"
      end
    end

    def valid_flush_interval?(flush_interval)
      interval = case flush_interval
                 when /^(?<time>\d+)s$/; $~[:time].to_i
                 when /^(?<time>\d+)m$/; $~[:time].to_i*60
                 when /^(?<time>\d+)h$/; $~[:time].to_i*60*60
                 when /^(?<time>\d+)$/; $~[:time].to_i
                 end
      interval >= 60 ? true : false
    end

    def valid_dag_database?(database)
      database =~ /^([0-9a-z]+)$/ ? true : false
    end

    def valid_dag_table?(table)
      table =~ /^([0-9a-z_]+)$/ ? true : false
    end
  end

end
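DagOutput#write builds the upload key by strftime-expanding @path and then substituting the %{...} tokens in @dag_object_key_format, incrementing the index until the key is unused in the bucket. The substitution step in isolation looks like the following sketch (values are illustrative; no bucket lookup is performed here):

  # key_expansion_sketch.rb -- standalone illustration of the
  # %{path}_%{index}.%{file_extension} expansion used in DagOutput#write.
  path_template         = "sample_tbl/date=%Y%m%d/hour=%-H/dag_%-M"
  dag_object_key_format = "%{path}_%{index}.%{file_extension}"

  values = {
    "path"           => Time.now.utc.strftime(path_template),
    "file_extension" => "gz",
    "index"          => 0   # the plugin bumps this until the key does not exist yet
  }

  dag_path = dag_object_key_format.gsub(%r(%{[^}]+})) { |expr|
    values[expr[2...expr.size - 1]].to_s
  }
  puts dag_path   # e.g. sample_tbl/date=20150604/hour=9/dag_30_0.gz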
data/test/out_dag.rb
ADDED
@@ -0,0 +1,162 @@
require 'fluent/test'
require 'fluent/plugin/out_dag'

require 'flexmock/test_unit'
require 'zlib'

class DagOutputTest < Test::Unit::TestCase
  def setup
    require 'aws-sdk'
    Fluent::Test.setup
  end

  CONFIG = %[
    dag_key_id test_key_id
    dag_sec_key test_sec_key
    dag_database test_db
    dag_table test_tbl
    utc
    buffer_type memory
  ]

  def create_driver(conf = CONFIG)
    Fluent::Test::BufferedOutputTestDriver.new(Fluent::DagOutput) do
      def write(chunk)
        chunk.read
      end

      private

      def ensure_bucket
      end

      def check_apikeys
      end
    end.configure(conf)
  end

  def test_configure
    d = create_driver
    assert_equal 'test_key_id', d.instance.dag_key_id
    assert_equal 'test_sec_key', d.instance.dag_sec_key
    assert_equal 'test_db', d.instance.dag_database
    assert_equal 'test_tbl', d.instance.dag_table
    assert d.instance.instance_variable_get(:@use_ssl)
    assert_equal 'gz', d.instance.instance_variable_get(:@ext)
    assert_equal 'application/x-gzip', d.instance.instance_variable_get(:@mime_type)
  end

  def test_path_slicing
    d = create_driver(CONFIG)
    path_slicer = d.instance.instance_variable_get(:@path_slicer)
    path = d.instance.instance_variable_get(:@path)
    slice = path_slicer.call(path)
    assert_equal slice, Time.now.utc.strftime("test_tbl/date=%Y%m%d/hour=%-H/dag_%-M")
  end

  def test_path_slicing_utc
    config = CONFIG.clone
    config << "\nutc\n"
    d = create_driver(config)
    path_slicer = d.instance.instance_variable_get(:@path_slicer)
    path = d.instance.instance_variable_get(:@path)
    slice = path_slicer.call(path)
    assert_equal slice, Time.now.utc.strftime("test_tbl/date=%Y%m%d/hour=%-H/dag_%-M")
  end

  def test_format
    d = create_driver

    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
    d.emit({"a"=>1}, time)
    d.emit({"a"=>2}, time)

    d.expect_format %[{"time":#{time},"v":{"a":1}}\n]
    d.expect_format %[{"time":#{time},"v":{"a":2}}\n]

    d.run
  end

  def test_format_included_tag
    config = [CONFIG, 'include_tag_key true'].join("\n")
    d = create_driver(config)

    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
    d.emit({"a"=>1}, time)
    d.emit({"a"=>2}, time)

    d.expect_format %[{"time":#{time},"v":{"a":1,"tag":"test"}}\n]
    d.expect_format %[{"time":#{time},"v":{"a":2,"tag":"test"}}\n]

    d.run
  end

  def test_chunk_to_write
    d = create_driver

    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
    d.emit({"a"=>1}, time)
    d.emit({"a"=>2}, time)

    # DagOutputTest#write returns chunk.read
    data = d.run

    assert_equal %[{"time":#{time},"v":{"a":1}}\n] +
                 %[{"time":#{time},"v":{"a":2}}\n],
                 data
  end

  CONFIG2 = %[
    hostname testing.node.local
    dag_key_id test_key_id
    dag_sec_key test_sec_key
    dag_database test_db
    dag_table test_tbl
    time_slice_format %Y%m%d-%H
    utc
    buffer_type memory
    auto_create_bucket false
    log_level debug
  ]

  def create_time_sliced_driver(additional_conf = '')
    d = Fluent::Test::TimeSlicedOutputTestDriver.new(Fluent::DagOutput) do
      private

      def check_apikeys
      end
    end.configure([CONFIG2, additional_conf].join("\n"))
    d
  end

  def setup_mocks(exists_return = false)
    dag_bucket = flexmock(AWS::S3::Bucket)
    dag_bucket.should_receive(:exists?).with_any_args.and_return { exists_return }
    dag_bucket_col = flexmock(AWS::S3::BucketCollection)
    dag_bucket_col.should_receive(:[]).with_any_args.and_return { dag_bucket }
    flexmock(AWS::S3).new_instances do |bucket|
      bucket.should_receive(:buckets).with_any_args.and_return { dag_bucket_col }
    end

    return dag_bucket, dag_bucket_col
  end

  def test_auto_create_bucket_false_with_non_existence_bucket
    dag_bucket, dag_bucket_col = setup_mocks

    d = create_time_sliced_driver('auto_create_bucket false')
    assert_raise(RuntimeError, "The specified bucket does not exist: bucket = test_bucket") {
      d.run
    }
  end

  def test_auto_create_bucket_true_with_non_existence_bucket
    dag_bucket, dag_bucket_col = setup_mocks
    dag_bucket_col.should_receive(:create).with_any_args.and_return { true }

    d = create_time_sliced_driver('auto_create_bucket true')
    assert_nothing_raised {
      d.run
    }
  end
end
metadata
ADDED
@@ -0,0 +1,136 @@
--- !ruby/object:Gem::Specification
name: fluent-plugin-dag
version: !ruby/object:Gem::Version
  version: 1.0.2
platform: ruby
authors:
- iij
autorequire:
bindir: bin
cert_chain: []
date: 2015-06-04 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: fluentd
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.10.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.10.0
- !ruby/object:Gem::Dependency
  name: aws-sdk-v1
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 1.59.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 1.59.0
- !ruby/object:Gem::Dependency
  name: yajl-ruby
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.0'
- !ruby/object:Gem::Dependency
  name: fluent-mixin-config-placeholders
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.2.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.2.0
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 0.9.2
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 0.9.2
- !ruby/object:Gem::Dependency
  name: flexmock
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 1.2.0
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 1.2.0
description: Dag output plugin for Fluentd event collector
email: dag-info@iij.ad.jp
executables: []
extensions: []
extra_rdoc_files: []
files:
- ".gitignore"
- Gemfile
- LICENSE.txt
- README.rdoc
- Rakefile
- VERSION
- fluent-plugin-dag.gemspec
- lib/fluent/plugin/out_dag.rb
- test/out_dag.rb
homepage: http://www.iij.ad.jp/biz/storage/
licenses: []
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.2.3
signing_key:
specification_version: 4
summary: Dag output plugin for Fluentd event collector
test_files:
- test/out_dag.rb