fluent-plugin-opensearch 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. checksums.yaml +7 -0
  2. data/.coveralls.yml +1 -0
  3. data/.editorconfig +9 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
  5. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  6. data/.github/workflows/coverage.yaml +22 -0
  7. data/.github/workflows/issue-auto-closer.yml +12 -0
  8. data/.github/workflows/linux.yml +26 -0
  9. data/.github/workflows/macos.yml +26 -0
  10. data/.github/workflows/windows.yml +26 -0
  11. data/.gitignore +18 -0
  12. data/CONTRIBUTING.md +24 -0
  13. data/Gemfile +10 -0
  14. data/History.md +6 -0
  15. data/ISSUE_TEMPLATE.md +26 -0
  16. data/LICENSE.txt +201 -0
  17. data/PULL_REQUEST_TEMPLATE.md +9 -0
  18. data/README.OpenSearchGenID.md +116 -0
  19. data/README.OpenSearchInput.md +291 -0
  20. data/README.Troubleshooting.md +482 -0
  21. data/README.md +1556 -0
  22. data/Rakefile +37 -0
  23. data/fluent-plugin-opensearch.gemspec +38 -0
  24. data/gemfiles/Gemfile.elasticsearch.v6 +12 -0
  25. data/lib/fluent/log-ext.rb +64 -0
  26. data/lib/fluent/plugin/filter_opensearch_genid.rb +103 -0
  27. data/lib/fluent/plugin/in_opensearch.rb +351 -0
  28. data/lib/fluent/plugin/oj_serializer.rb +48 -0
  29. data/lib/fluent/plugin/opensearch_constants.rb +39 -0
  30. data/lib/fluent/plugin/opensearch_error.rb +31 -0
  31. data/lib/fluent/plugin/opensearch_error_handler.rb +166 -0
  32. data/lib/fluent/plugin/opensearch_fallback_selector.rb +36 -0
  33. data/lib/fluent/plugin/opensearch_index_template.rb +155 -0
  34. data/lib/fluent/plugin/opensearch_simple_sniffer.rb +36 -0
  35. data/lib/fluent/plugin/opensearch_tls.rb +96 -0
  36. data/lib/fluent/plugin/out_opensearch.rb +1124 -0
  37. data/lib/fluent/plugin/out_opensearch_data_stream.rb +214 -0
  38. data/test/helper.rb +61 -0
  39. data/test/plugin/test_alias_template.json +9 -0
  40. data/test/plugin/test_filter_opensearch_genid.rb +241 -0
  41. data/test/plugin/test_in_opensearch.rb +493 -0
  42. data/test/plugin/test_index_alias_template.json +11 -0
  43. data/test/plugin/test_index_template.json +25 -0
  44. data/test/plugin/test_oj_serializer.rb +45 -0
  45. data/test/plugin/test_opensearch_error_handler.rb +689 -0
  46. data/test/plugin/test_opensearch_fallback_selector.rb +100 -0
  47. data/test/plugin/test_opensearch_tls.rb +171 -0
  48. data/test/plugin/test_out_opensearch.rb +3953 -0
  49. data/test/plugin/test_out_opensearch_data_stream.rb +474 -0
  50. data/test/plugin/test_template.json +23 -0
  51. data/test/test_log-ext.rb +61 -0
  52. metadata +262 -0
@@ -0,0 +1,214 @@
1
+
2
require_relative 'out_opensearch'

module Fluent::Plugin
  # Output plugin that routes events into an OpenSearch data stream.
  #
  # Inherits connection handling, retry logic and serialization from
  # OpenSearchOutput and adds:
  #   * validation of data-stream / template names against OpenSearch
  #     naming rules
  #   * lazy creation of the index template and the data stream itself
  #     (once per distinct resolved name when placeholders are used)
  #   * bulk submission using the "create" op type, which data streams
  #     require.
  class OpenSearchOutputDataStream < OpenSearchOutput

    Fluent::Plugin.register_output('opensearch_data_stream', self)

    helpers :event_emitter

    # Target data stream name; may contain Fluentd placeholders.
    config_param :data_stream_name, :string
    # Index template name; defaults to "<data_stream_name>_template".
    config_param :data_stream_template_name, :string, :default => nil
    # OpenSearch 1.0 or later always support new style of index template.
    config_set_default :use_legacy_template, false

    # NOTE(review): constant keeps its historical misspelling ("CHRACTERS")
    # because it is a public constant that external code may reference.
    INVALID_START_CHRACTERS = ["-", "_", "+", "."]
    INVALID_CHARACTERS = ["\\", "/", "*", "?", "\"", "<", ">", "|", " ", ",", "#", ":"]

    def configure(conf)
      super

      @data_stream_template_name = "#{@data_stream_name}_template" if @data_stream_template_name.nil?

      # ref. https://opensearch.org/docs/latest/opensearch/data-streams/
      unless placeholder?(:data_stream_name_placeholder, @data_stream_name)
        validate_data_stream_parameters
      else
        # Placeholder resolution is deferred to write(); remember which
        # resolved names have already been bootstrapped.
        @use_placeholder = true
        @data_stream_names = []
      end

      @client = client
      unless @use_placeholder
        begin
          @data_stream_names = [@data_stream_name]
          create_index_template(@data_stream_name, @data_stream_template_name, @host)
          create_data_stream(@data_stream_name)
        rescue => e
          raise Fluent::ConfigError, "Failed to create data stream: <#{@data_stream_name}> #{e.message}"
        end
      end
    end

    # Raises Fluent::ConfigError unless both the data stream name and the
    # template name satisfy OpenSearch naming rules: lowercase only, no
    # invalid characters, valid leading character, not "." or "..",
    # and at most 255 bytes.
    def validate_data_stream_parameters
      {"data_stream_name" => @data_stream_name,
       "data_stream_template_name" => @data_stream_template_name}.each do |parameter, value|
        unless valid_data_stream_parameters?(value)
          unless start_with_valid_characters?(value)
            # A valid-looking value that fails the start check is either
            # dot-only ("."/"..") or begins with a forbidden character.
            if not_dots?(value)
              raise Fluent::ConfigError, "'#{parameter}' must not start with #{INVALID_START_CHRACTERS.join(",")}: <#{value}>"
            else
              raise Fluent::ConfigError, "'#{parameter}' must not be . or ..: <#{value}>"
            end
          end
          unless valid_characters?(value)
            raise Fluent::ConfigError, "'#{parameter}' must not contain invalid characters #{INVALID_CHARACTERS.join(",")}: <#{value}>"
          end
          unless lowercase_only?(value)
            raise Fluent::ConfigError, "'#{parameter}' must be lowercase only: <#{value}>"
          end
          if value.bytes.size > 255
            raise Fluent::ConfigError, "'#{parameter}' must not be longer than 255 bytes: <#{value}>"
          end
        end
      end
    end

    # Installs a composable index template matching "<datastream_name>*"
    # unless the data stream or the template already exists on +host+.
    def create_index_template(datastream_name, template_name, host)
      return if data_stream_exist?(datastream_name) or template_exists?(template_name, host)
      body = {
        "index_patterns" => ["#{datastream_name}*"],
        "data_stream" => {},
      }
      params = {
        name: template_name,
        body: body
      }
      retry_operate(@max_retry_putting_template,
                    @fail_on_putting_template_retry_exceed,
                    @catch_transport_exception_on_retry) do
        @client.indices.put_index_template(params)
      end
    end

    # True when GET /_data_stream/<name> succeeds; false on 404.
    def data_stream_exist?(datastream_name)
      params = {
        name: datastream_name
      }
      begin
        # TODO: Use X-Pack equivalent performing DataStream operation method on the following line
        @client.perform_request('GET', "/_data_stream/#{datastream_name}", {}, params)
        # perform_request raises on error statuses, so reaching this point
        # means the data stream exists. (The former
        # `response.is_a?(Errors::NotFound)` check was dead code: a 404 is
        # raised, never returned.)
        return true
      rescue OpenSearch::Transport::Transport::Errors::NotFound => e
        log.info "Specified data stream does not exist. Will be created: <#{e}>"
        return false
      end
    end

    # Creates the data stream unless it already exists, using the same
    # retry policy as template installation.
    def create_data_stream(datastream_name)
      return if data_stream_exist?(datastream_name)
      params = {
        name: datastream_name
      }
      retry_operate(@max_retry_putting_template,
                    @fail_on_putting_template_retry_exceed,
                    @catch_transport_exception_on_retry) do
        # TODO: Use X-Pack equivalent performing DataStream operation method on the following line
        @client.perform_request('PUT', "/_data_stream/#{datastream_name}", {}, params)
      end
    end

    # True when the (legacy or composable, per @use_legacy_template)
    # index template named +name+ exists on +host+.
    def template_exists?(name, host = nil)
      if @use_legacy_template
        client(host).indices.get_template(:name => name)
      else
        client(host).indices.get_index_template(:name => name)
      end
      return true
    rescue OpenSearch::Transport::Transport::Errors::NotFound
      return false
    end

    def valid_data_stream_parameters?(data_stream_parameter)
      lowercase_only?(data_stream_parameter) and
        valid_characters?(data_stream_parameter) and
        start_with_valid_characters?(data_stream_parameter) and
        not_dots?(data_stream_parameter) and
        data_stream_parameter.bytes.size <= 255
    end

    def lowercase_only?(data_stream_parameter)
      data_stream_parameter.downcase == data_stream_parameter
    end

    def valid_characters?(data_stream_parameter)
      not (INVALID_CHARACTERS.any? { |v| data_stream_parameter.include?(v) })
    end

    def start_with_valid_characters?(data_stream_parameter)
      not (INVALID_START_CHRACTERS.any? { |v| data_stream_parameter.start_with?(v) })
    end

    def not_dots?(data_stream_parameter)
      not (data_stream_parameter == "." or data_stream_parameter == "..")
    end

    def client_library_version
      OpenSearch::VERSION
    end

    def multi_workers_ready?
      true
    end

    # Buffer flush: resolves placeholders, bootstraps any data stream not
    # seen before, then bulk-inserts the chunk with the "create" op type.
    def write(chunk)
      data_stream_name = @data_stream_name
      data_stream_template_name = @data_stream_template_name
      host = @host
      if @use_placeholder
        data_stream_name = extract_placeholders(@data_stream_name, chunk)
        data_stream_template_name = extract_placeholders(@data_stream_template_name, chunk)
        unless @data_stream_names.include?(data_stream_name)
          begin
            create_index_template(data_stream_name, data_stream_template_name, host)
            create_data_stream(data_stream_name)
            @data_stream_names << data_stream_name
          rescue => e
            raise Fluent::ConfigError, "Failed to create data stream: <#{data_stream_name}> #{e.message}"
          end
        end
      end

      bulk_message = ""
      headers = {
        CREATE_OP => {}
      }
      tag = chunk.metadata.tag
      chunk.msgpack_each do |time, record|
        next unless record.is_a? Hash

        begin
          # Data streams require an @timestamp field on every document.
          record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
          bulk_message = append_record_to_messages(CREATE_OP, {}, headers, record, bulk_message)
        rescue => e
          router.emit_error_event(tag, time, record, e)
        end
      end

      params = {
        index: data_stream_name,
        body: bulk_message
      }
      begin
        response = @client.bulk(params)
        if response['errors']
          log.error "Could not bulk insert to Data Stream: #{data_stream_name} #{response}"
        end
      rescue => e
        raise RecoverableRequestFailure, "could not push logs to OpenSearch cluster (#{data_stream_name}): #{e.message}"
      end
    end

    # Serializes one action/record pair in bulk-API (NDJSON) form and
    # appends it to +msgs+, returning the accumulator.
    def append_record_to_messages(op, meta, header, record, msgs)
      header[CREATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(record) << BODY_DELIMITER
      msgs
    end

    def retry_stream_retryable?
      @buffer.storable?
    end
  end
end
data/test/helper.rb ADDED
@@ -0,0 +1,61 @@
1
# coding: utf-8
# SPDX-License-Identifier: Apache-2.0
#
# The fluent-plugin-opensearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright fluent-plugin-opensearch Contributors. See
# GitHub history for details.
#
# Licensed to Uken Inc. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Uken Inc. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Coverage setup must happen before any code under test is loaded.
# (A duplicated `require 'simplecov'` line was removed; `require` is
# idempotent so behavior is unchanged.)
require 'simplecov'
require 'simplecov-lcov'

SimpleCov::Formatter::LcovFormatter.config do |config|
  config.report_with_single_file = true
  config.single_report_path = 'coverage/lcov.info'
end

SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([
  SimpleCov::Formatter::HTMLFormatter,
  SimpleCov::Formatter::LcovFormatter
])

SimpleCov.start do
  # Only measure coverage for files under lib/.
  add_filter do |src|
    !(src.filename =~ /^#{SimpleCov.root}\/lib/)
  end
end

# needs to be after simplecov but before test/unit, because fluentd sets default
# encoding to ASCII-8BIT, but coverall might load git data which could contain a
# UTF-8 character
at_exit do
  Encoding.default_internal = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_internal)
  Encoding.default_external = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_external)
end

require 'test/unit'
require 'fluent/test'
require 'minitest/pride'

require 'webmock/test_unit'
WebMock.disable_net_connect!
@@ -0,0 +1,9 @@
1
+ {
2
+ "order": 5,
3
+ "template": "--index_prefix-----appid---*",
4
+ "settings": {},
5
+ "mappings": {},
6
+ "aliases": {
7
+ "--appid---alias": {}
8
+ }
9
+ }
@@ -0,0 +1,241 @@
1
# SPDX-License-Identifier: Apache-2.0
#
# The fluent-plugin-opensearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright fluent-plugin-opensearch Contributors. See
# GitHub history for details.
#
# Licensed to Uken Inc. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Uken Inc. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

require_relative '../helper'
require 'date'
require 'fluent/test/helpers'
require 'json'
require 'fluent/test/driver/filter'
require 'flexmock/test_unit'
require 'fluent/plugin/filter_opensearch_genid'

# Tests for the opensearch_genid filter, which injects a hash id
# (random UUID, or a digest of the record/tag/time) into each record.
#
# NOTE(review): the data-driven test methods declared without a `data`
# parameter were fixed to accept `(data)`, matching their siblings —
# otherwise `hash_type, expected = data` does not receive the fixture.
class OpenSearchGenidFilterTest < Test::Unit::TestCase
  include FlexMock::TestCase
  include Fluent::Test::Helpers

  def setup
    Fluent::Test.setup
  end

  def create_driver(conf='')
    Fluent::Test::Driver::Filter.new(Fluent::Plugin::OpenSearchGenidFilter).configure(conf)
  end

  test "invalid configuration" do
    assert_raise(Fluent::ConfigError) do
      create_driver("use_record_as_seed true")
    end
  end

  def sample_record
    {'age' => 26, 'request_id' => '42', 'parent_id' => 'parent', 'routing_id' => 'routing'}
  end

  def test_configure
    d = create_driver
    assert_equal '_hash', d.instance.hash_id_key
  end

  data("default" => {"hash_id_key" => "_hash"},
       "custom_key" => {"hash_id_key" => "_edited"},
      )
  def test_filter(data)
    d = create_driver("hash_id_key #{data["hash_id_key"]}")
    flexmock(SecureRandom).should_receive(:uuid)
      .and_return("13a0c028-bf7c-4ae2-ad03-ec09a40006df")
    time = event_time("2017-10-15 15:00:23.34567890 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, sample_record)
    end
    assert_equal(Base64.strict_encode64(SecureRandom.uuid),
                 d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
  end

  # Digest of selected record keys as seed.
  class UseRecordAsSeedTest < self
    data("md5" => ["md5", "PPg+zmH1ASUCpNzMUcTzqw=="],
         "sha1" => ["sha1", "JKfCrEAxeAyRSdcKqkw4unC9xZ8="],
         "sha256" => ["sha256", "9Z9i+897bGivSItD/6i0vye9uRwq/sLwWkxOwydtTJY="],
         "sha512" => ["sha512", "KWI5OdZPaCFW9/CEY3NoGrvueMtjZJdmGdqIVGJP8vgI4uW+0gHExZVaHerw+RhbtIdLCtVZ43xBgMKH+KliQg=="],
        )
    def test_simple(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        record_keys age,parent_id,routing_id,custom_key
        hash_type #{hash_type}
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end

    data("md5" => ["md5", "qUO/xqWiOJq4D0ApdoHVEQ=="],
         "sha1" => ["sha1", "v3UWYr90zIH2veGQBVwUH586TuI="],
         "sha256" => ["sha256", "4hwh10qfw9B24NtNFoEFF8wCiImvgIy1Vk4gzcKt5Pw="],
         "sha512" => ["sha512", "TY3arcmC8mhYClDIjQxH8ePRLnHK01Cj5QQL8FxbwNtPQBY3IZ4qJY9CpOusmdWBYwm1golRVQCmURiAhlnWIQ=="],)
    def test_record_with_tag(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        record_keys age,parent_id,routing_id,custom_key
        hash_type #{hash_type}
        include_tag_in_seed true
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test.fluentd') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end

    data("md5" => ["md5", "oHo+PoC5I4KC+XCfXvyf9w=="],
         "sha1" => ["sha1", "50Nwarm2225gLy1ka8d9i+W6cKA="],
         "sha256" => ["sha256", "ReX1XgizcrHjBc0sQwx9Sjuf2QBFll2njYf4ee+XSIc="],
         "sha512" => ["sha512", "8bcpZrqNUQIz6opdoVZz0MwxP8r9SCqOEPkWF6xGLlFwPCJVqk2SQp99m8rPufr0xPIgvZyOMejA5slBV9xrdg=="],)
    def test_record_with_time(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        record_keys age,parent_id,routing_id,custom_key
        hash_type #{hash_type}
        include_time_in_seed true
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test.fluentd') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end

    data("md5" => ["md5", "u7/hr09gDC9CM5DI7tLc2Q=="],
         "sha1" => ["sha1", "1WgptcTnVSHtTAlNUwNcoiaY3oM="],
         "sha256" => ["sha256", "1iWZHI19m/A1VH8iFK7H2KFoyLdszpJRiVeKBv1Ndis="],
         "sha512" => ["sha512", "NM+ui0lUmeDaEJsT7c9EyTc+lQBbRf1x6MQXXYdxp21CX3jZvHy3IT8Xp9ZdIKevZwhoo3Suo/tIBlfyLFXJXw=="],)
    def test_record_with_tag_and_time(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        record_keys age,parent_id,routing_id,custom_key
        hash_type #{hash_type}
        include_tag_in_seed true
        include_time_in_seed true
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test.fluentd') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end
  end

  # Digest of the entire record as seed.
  class UseEntireRecordAsSeedTest < self
    data("md5" => ["md5", "MuMU0gHOP1cWvvg/J4aEFg=="],
         "sha1" => ["sha1", "GZ6Iup9Ywyk5spCWtPQbtZnfK0U="],
         "sha256" => ["sha256", "O4YN0RiXCUAYeaR97UUULRLxgra/R2dvTV47viir5l4="],
         "sha512" => ["sha512", "FtbwO1xsLUq0KcO0mj0l80rbwFH5rGE3vL+Vgh90+4R/9j+/Ni/ipwhiOoUcetDxj1r5Vf/92B54La+QTu3eMA=="],)
    def test_record(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        use_entire_record true
        hash_type #{hash_type}
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test.fluentd') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end

    data("md5" => ["md5", "GJfpWe8ofiGzn97bc9Gh0Q=="],
         "sha1" => ["sha1", "AVaK67Tz0bEJ8xNEzjOQ6r9fAu4="],
         "sha256" => ["sha256", "WIXWAuf/Z94Uw95mudloo2bgjhSsSduQIwkKTQsNFgU="],
         "sha512" => ["sha512", "yjMGGxy8uc7gCrPgm8W6MzJGLFk0GtUwJ6w/91laf6WNywuvG/7T6kNHLagAV8rSW8xzxmtEfyValBO5scuoKw=="],)
    def test_record_with_tag(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        use_entire_record true
        hash_type #{hash_type}
        include_tag_in_seed true
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test.fluentd') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end

    data("md5" => ["md5", "5nQSaJ4F1p9rDFign13Lfg=="],
         "sha1" => ["sha1", "hyo9+0ZFBpizKl2NShs3C8yQcGw="],
         "sha256" => ["sha256", "romVsZSIksbqYsOSnUzolZQw76ankcy0DgvDZ3CayTo="],
         "sha512" => ["sha512", "RPU7K2Pt0iVyvV7p5usqcUIIOmfTajD1aa7pkR9qZ89UARH/lpm6ESY9iwuYJj92lxOUuF5OxlEwvV7uXJ07iA=="],)
    def test_record_with_time(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        use_entire_record true
        hash_type #{hash_type}
        include_time_in_seed true
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test.fluentd') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end

    data("md5" => ["md5", "zGQF35KlMUibJAcgkgQDtw=="],
         "sha1" => ["sha1", "1x9RZO1xEuWps090qq4DUIsU9x8="],
         "sha256" => ["sha256", "eulMz0eF56lBEf31aIs0OG2TGCH/aoPfZbRqfEOkAwk="],
         "sha512" => ["sha512", "mIiYATtpdUFEFCIZg1FdKssIs7oWY0gJjhSSbet0ddUmqB+CiQAcAMTmrXO6AVSH0vsMvao/8vtC8AsIPfF1fA=="],)
    def test_record_with_tag_and_time(data)
      hash_type, expected = data
      d = create_driver(%[
        use_record_as_seed true
        use_entire_record true
        hash_type #{hash_type}
        include_tag_in_seed true
        include_time_in_seed true
      ])
      time = event_time("2017-10-15 15:00:23.34567890 UTC")
      d.run(default_tag: 'test.fluentd') do
        d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
      end
      assert_equal(expected,
                   d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
    end
  end
end