fluent-plugin-input-opensearch 1.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. checksums.yaml +7 -0
  2. data/.coveralls.yml +1 -0
  3. data/.editorconfig +9 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +29 -0
  5. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  6. data/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md +9 -0
  7. data/.github/workflows/coverage.yaml +22 -0
  8. data/.github/workflows/issue-auto-closer.yml +12 -0
  9. data/.github/workflows/linux.yml +26 -0
  10. data/.github/workflows/macos.yml +26 -0
  11. data/.github/workflows/windows.yml +26 -0
  12. data/.gitignore +18 -0
  13. data/CONTRIBUTING.md +24 -0
  14. data/Gemfile +10 -0
  15. data/History.md +67 -0
  16. data/LICENSE.txt +201 -0
  17. data/README.OpenSearchGenID.md +116 -0
  18. data/README.OpenSearchInput.md +314 -0
  19. data/README.Troubleshooting.md +482 -0
  20. data/README.md +1622 -0
  21. data/Rakefile +37 -0
  22. data/fluent-plugin-opensearch.gemspec +39 -0
  23. data/gemfiles/Gemfile.elasticsearch.v6 +12 -0
  24. data/lib/fluent/log-ext.rb +64 -0
  25. data/lib/fluent/plugin/filter_opensearch_genid.rb +103 -0
  26. data/lib/fluent/plugin/in_opensearch.rb +410 -0
  27. data/lib/fluent/plugin/oj_serializer.rb +48 -0
  28. data/lib/fluent/plugin/opensearch_constants.rb +39 -0
  29. data/lib/fluent/plugin/opensearch_error.rb +31 -0
  30. data/lib/fluent/plugin/opensearch_error_handler.rb +182 -0
  31. data/lib/fluent/plugin/opensearch_fallback_selector.rb +36 -0
  32. data/lib/fluent/plugin/opensearch_index_template.rb +155 -0
  33. data/lib/fluent/plugin/opensearch_simple_sniffer.rb +36 -0
  34. data/lib/fluent/plugin/opensearch_tls.rb +96 -0
  35. data/lib/fluent/plugin/out_opensearch.rb +1158 -0
  36. data/lib/fluent/plugin/out_opensearch_data_stream.rb +229 -0
  37. data/test/helper.rb +60 -0
  38. data/test/plugin/datastream_template.json +4 -0
  39. data/test/plugin/test_alias_template.json +9 -0
  40. data/test/plugin/test_filter_opensearch_genid.rb +241 -0
  41. data/test/plugin/test_in_opensearch.rb +500 -0
  42. data/test/plugin/test_index_alias_template.json +11 -0
  43. data/test/plugin/test_index_template.json +25 -0
  44. data/test/plugin/test_oj_serializer.rb +45 -0
  45. data/test/plugin/test_opensearch_error_handler.rb +770 -0
  46. data/test/plugin/test_opensearch_fallback_selector.rb +100 -0
  47. data/test/plugin/test_opensearch_tls.rb +171 -0
  48. data/test/plugin/test_out_opensearch.rb +3980 -0
  49. data/test/plugin/test_out_opensearch_data_stream.rb +746 -0
  50. data/test/plugin/test_template.json +23 -0
  51. data/test/test_log-ext.rb +61 -0
  52. metadata +291 -0
data/lib/fluent/plugin/out_opensearch_data_stream.rb ADDED
@@ -0,0 +1,229 @@
+
+ require_relative 'out_opensearch'
+
+ module Fluent::Plugin
+   class OpenSearchOutputDataStream < OpenSearchOutput
+
+     Fluent::Plugin.register_output('opensearch_data_stream', self)
+
+     helpers :event_emitter
+
+     config_param :data_stream_name, :string
+     config_param :data_stream_template_name, :string, :default => nil
+     # OpenSearch 1.0 or later always support new style of index template.
+     config_set_default :use_legacy_template, false
+
+     INVALID_START_CHRACTERS = ["-", "_", "+", "."]
+     INVALID_CHARACTERS = ["\\", "/", "*", "?", "\"", "<", ">", "|", " ", ",", "#", ":"]
+
+     def configure(conf)
+       super
+
+       @data_stream_template_name = "#{@data_stream_name}_template" if @data_stream_template_name.nil?
+
+       # ref. https://opensearch.org/docs/latest/opensearch/data-streams/
+       unless placeholder?(:data_stream_name_placeholder, @data_stream_name)
+         validate_data_stream_parameters
+       else
+         @use_placeholder = true
+         @data_stream_names = []
+       end
+
+       unless @use_placeholder
+         begin
+           @data_stream_names = [@data_stream_name]
+           retry_operate(@max_retry_putting_template,
+                         @fail_on_putting_template_retry_exceed,
+                         @catch_transport_exception_on_retry) do
+             create_index_template(@data_stream_name, @data_stream_template_name)
+           end
+         rescue => e
+           raise Fluent::ConfigError, "Failed to create data stream: <#{@data_stream_name}> #{e.message}"
+         end
+       end
+     end
+
+     def validate_data_stream_parameters
+       {"data_stream_name" => @data_stream_name,
+        "data_stream_template_name" => @data_stream_template_name}.each do |parameter, value|
+         unless valid_data_stream_parameters?(value)
+           unless start_with_valid_characters?(value)
+             if not_dots?(value)
+               raise Fluent::ConfigError, "'#{parameter}' must not start with #{INVALID_START_CHRACTERS.join(",")}: <#{value}>"
+             else
+               raise Fluent::ConfigError, "'#{parameter}' must not be . or ..: <#{value}>"
+             end
+           end
+           unless valid_characters?(value)
+             raise Fluent::ConfigError, "'#{parameter}' must not contain invalid characters #{INVALID_CHARACTERS.join(",")}: <#{value}>"
+           end
+           unless lowercase_only?(value)
+             raise Fluent::ConfigError, "'#{parameter}' must be lowercase only: <#{value}>"
+           end
+           if value.bytes.size > 255
+             raise Fluent::ConfigError, "'#{parameter}' must not be longer than 255 bytes: <#{value}>"
+           end
+         end
+       end
+     end
+
+     def create_index_template(datastream_name, template_name, host = nil)
+       # Create index template from file
+       if !dry_run?
+         if @template_file
+           return if data_stream_exist?(datastream_name, host) or template_exists?(template_name, host)
+           template_installation_actual(template_name, @customize_template, @application_name, datastream_name, host)
+         else # Create default index template
+           return if data_stream_exist?(datastream_name, host) or template_exists?(template_name, host)
+           body = {
+             "index_patterns" => ["#{datastream_name}*"],
+             "data_stream" => {},
+           }
+
+           params = {
+             name: template_name,
+             body: body
+           }
+           retry_operate(@max_retry_putting_template,
+                         @fail_on_putting_template_retry_exceed,
+                         @catch_transport_exception_on_retry) do
+             client(host).indices.put_index_template(params)
+           end
+         end
+       end
+     end
+
+     def data_stream_exist?(datastream_name, host = nil)
+       params = {
+         name: datastream_name
+       }
+       begin
+         # TODO: Use X-Pack equivalent performing DataStream operation method on the following line
+         response = client(host).perform_request('GET', "/_data_stream/#{datastream_name}", {}, params)
+         return (not response.is_a?(OpenSearch::Transport::Transport::Errors::NotFound))
+       rescue OpenSearch::Transport::Transport::Errors::NotFound => e
+         log.info "Specified data stream does not exist. Will be created: <#{e}>"
+         return false
+       end
+     end
+
+     def template_exists?(name, host = nil)
+       if @use_legacy_template
+         client(host).indices.get_template(:name => name)
+       else
+         client(host).indices.get_index_template(:name => name)
+       end
+       return true
+     rescue OpenSearch::Transport::Transport::Errors::NotFound
+       return false
+     end
+
+     def valid_data_stream_parameters?(data_stream_parameter)
+       lowercase_only?(data_stream_parameter) and
+         valid_characters?(data_stream_parameter) and
+         start_with_valid_characters?(data_stream_parameter) and
+         not_dots?(data_stream_parameter) and
+         data_stream_parameter.bytes.size <= 255
+     end
+
+     def lowercase_only?(data_stream_parameter)
+       data_stream_parameter.downcase == data_stream_parameter
+     end
+
+     def valid_characters?(data_stream_parameter)
+       not (INVALID_CHARACTERS.each.any? do |v| data_stream_parameter.include?(v) end)
+     end
+
+     def start_with_valid_characters?(data_stream_parameter)
+       not (INVALID_START_CHRACTERS.each.any? do |v| data_stream_parameter.start_with?(v) end)
+     end
+
+     def not_dots?(data_stream_parameter)
+       not (data_stream_parameter == "." or data_stream_parameter == "..")
+     end
+
+     def client_library_version
+       OpenSearch::VERSION
+     end
+
+     def multi_workers_ready?
+       true
+     end
+
+     def write(chunk)
+       data_stream_name = @data_stream_name
+       data_stream_template_name = @data_stream_template_name
+       host = nil
+       if @use_placeholder
+         host = if @hosts
+                  extract_placeholders(@hosts, chunk)
+                else
+                  extract_placeholders(@host, chunk)
+                end
+         data_stream_name = extract_placeholders(@data_stream_name, chunk).downcase
+         data_stream_template_name = extract_placeholders(@data_stream_template_name, chunk).downcase
+         begin
+           create_index_template(data_stream_name, data_stream_template_name, host)
+         rescue => e
+           raise Fluent::ConfigError, "Failed to create data stream: <#{data_stream_name}> #{e.message}"
+         end
+       end
+
+       bulk_message = ""
+       headers = {
+         CREATE_OP => {}
+       }
+       tag = chunk.metadata.tag
+       chunk.msgpack_each do |time, record|
+         next unless record.is_a? Hash
+         begin
+           if record.has_key?(TIMESTAMP_FIELD)
+             rts = record[TIMESTAMP_FIELD]
+             dt = parse_time(rts, time, tag)
+           elsif record.has_key?(@time_key)
+             rts = record[@time_key]
+             dt = parse_time(rts, time, tag)
+           else
+             dt = Time.at(time).to_datetime
+           end
+           record.merge!({"@timestamp" => dt.iso8601(@time_precision)})
+           if @include_tag_key
+             record[@tag_key] = tag
+           end
+           if @remove_keys
+             @remove_keys.each { |key| record.delete(key) }
+           end
+           bulk_message = append_record_to_messages(CREATE_OP, {}, headers, record, bulk_message)
+         rescue => e
+           emit_error_label_event do
+             router.emit_error_event(tag, time, record, e)
+           end
+         end
+       end
+
+       params = {
+         index: data_stream_name,
+         body: bulk_message
+       }
+       begin
+         response = client(host).bulk(params)
+         if response['errors']
+           log.error "Could not bulk insert to Data Stream: #{data_stream_name} #{response}"
+         end
+       rescue => e
+         raise RecoverableRequestFailure, "could not push logs to OpenSearch cluster (#{data_stream_name}): #{e.message}"
+       end
+     end
+
+     def append_record_to_messages(op, meta, header, record, msgs)
+       header[CREATE_OP] = meta
+       msgs << @dump_proc.call(header) << BODY_DELIMITER
+       msgs << @dump_proc.call(record) << BODY_DELIMITER
+       msgs
+     end
+
+     def retry_stream_retryable?
+       @buffer.storable?
+     end
+   end
+ end
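For orientation only: a minimal Fluentd match section exercising the opensearch_data_stream output added above might look like the sketch below. The tag pattern, endpoint, and stream name are illustrative placeholders, not values shipped with the gem; as in configure above, data_stream_template_name falls back to "<data_stream_name>_template" when omitted.

<match logs.**>
  @type opensearch_data_stream
  host localhost                  # illustrative endpoint
  port 9200
  data_stream_name logs-fluentd   # must pass the validations above (lowercase, no invalid characters)
  # data_stream_template_name is optional; it would default to "logs-fluentd_template"
</match>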
data/test/helper.rb ADDED
@@ -0,0 +1,60 @@
+ # coding: utf-8
+ # SPDX-License-Identifier: Apache-2.0
+ #
+ # The fluent-plugin-opensearch Contributors require contributions made to
+ # this file be licensed under the Apache-2.0 license or a
+ # compatible open source license.
+ #
+ # Modifications Copyright fluent-plugin-opensearch Contributors. See
+ # GitHub history for details.
+ #
+ # Licensed to Uken Inc. under one or more contributor
+ # license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright
+ # ownership. Uken Inc. licenses this file to you under
+ # the Apache License, Version 2.0 (the "License"); you may
+ # not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ require 'simplecov'
+ require 'simplecov'
+ require 'simplecov-lcov'
+
+ SimpleCov::Formatter::LcovFormatter.config do |config|
+   config.report_with_single_file = true
+   config.single_report_path = 'coverage/lcov.info'
+ end
+
+ SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([
+   SimpleCov::Formatter::HTMLFormatter,
+   SimpleCov::Formatter::LcovFormatter
+ ])
+
+ SimpleCov.start do
+   add_filter do |src|
+     !(src.filename =~ /^#{SimpleCov.root}\/lib/)
+   end
+ end
+
+ # needs to be after simplecov but before test/unit, because fluentd sets default
+ # encoding to ASCII-8BIT, but coverall might load git data which could contain a
+ # UTF-8 character
+ at_exit do
+   Encoding.default_internal = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_internal)
+   Encoding.default_external = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_external)
+ end
+
+ require 'test/unit'
+ require 'fluent/test'
+
+ require 'webmock/test_unit'
+ WebMock.disable_net_connect!
data/test/plugin/datastream_template.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "index_patterns": ["foo*"],
+   "data_stream": {}
+ }
data/test/plugin/test_alias_template.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "order": 5,
+   "template": "--index_prefix-----appid---*",
+   "settings": {},
+   "mappings": {},
+   "aliases": {
+     "--appid---alias": {}
+   }
+ }
data/test/plugin/test_filter_opensearch_genid.rb ADDED
@@ -0,0 +1,241 @@
+ # SPDX-License-Identifier: Apache-2.0
+ #
+ # The fluent-plugin-opensearch Contributors require contributions made to
+ # this file be licensed under the Apache-2.0 license or a
+ # compatible open source license.
+ #
+ # Modifications Copyright fluent-plugin-opensearch Contributors. See
+ # GitHub history for details.
+ #
+ # Licensed to Uken Inc. under one or more contributor
+ # license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright
+ # ownership. Uken Inc. licenses this file to you under
+ # the Apache License, Version 2.0 (the "License"); you may
+ # not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ require_relative '../helper'
+ require 'date'
+ require 'fluent/test/helpers'
+ require 'json'
+ require 'fluent/test/driver/filter'
+ require 'flexmock/test_unit'
+ require 'fluent/plugin/filter_opensearch_genid'
+
+ class OpenSearchGenidFilterTest < Test::Unit::TestCase
+   include FlexMock::TestCase
+   include Fluent::Test::Helpers
+
+   def setup
+     Fluent::Test.setup
+   end
+
+   def create_driver(conf='')
+     Fluent::Test::Driver::Filter.new(Fluent::Plugin::OpenSearchGenidFilter).configure(conf)
+   end
+
+   test "invalid configuration" do
+     assert_raise(Fluent::ConfigError) do
+       create_driver("use_record_as_seed true")
+     end
+   end
+
+   def sample_record
+     {'age' => 26, 'request_id' => '42', 'parent_id' => 'parent', 'routing_id' => 'routing'}
+   end
+
+   def test_configure
+     d = create_driver
+     assert_equal '_hash', d.instance.hash_id_key
+   end
+
+   data("default" => {"hash_id_key" => "_hash"},
+        "custom_key" => {"hash_id_key" => "_edited"},
+       )
+   def test_filter(data)
+     d = create_driver("hash_id_key #{data["hash_id_key"]}")
+     flexmock(SecureRandom).should_receive(:uuid)
+       .and_return("13a0c028-bf7c-4ae2-ad03-ec09a40006df")
+     time = event_time("2017-10-15 15:00:23.34567890 UTC")
+     d.run(default_tag: 'test') do
+       d.feed(time, sample_record)
+     end
+     assert_equal(Base64.strict_encode64(SecureRandom.uuid),
+                  d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+   end
+
+   class UseRecordAsSeedTest < self
+     data("md5" => ["md5", "PPg+zmH1ASUCpNzMUcTzqw=="],
+          "sha1" => ["sha1", "JKfCrEAxeAyRSdcKqkw4unC9xZ8="],
+          "sha256" => ["sha256", "9Z9i+897bGivSItD/6i0vye9uRwq/sLwWkxOwydtTJY="],
+          "sha512" => ["sha512", "KWI5OdZPaCFW9/CEY3NoGrvueMtjZJdmGdqIVGJP8vgI4uW+0gHExZVaHerw+RhbtIdLCtVZ43xBgMKH+KliQg=="],
+         )
+     def test_simple(data)
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         record_keys age,parent_id,routing_id,custom_key
+         hash_type #{hash_type}
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+
+     data("md5" => ["md5", "qUO/xqWiOJq4D0ApdoHVEQ=="],
+          "sha1" => ["sha1", "v3UWYr90zIH2veGQBVwUH586TuI="],
+          "sha256" => ["sha256", "4hwh10qfw9B24NtNFoEFF8wCiImvgIy1Vk4gzcKt5Pw="],
+          "sha512" => ["sha512", "TY3arcmC8mhYClDIjQxH8ePRLnHK01Cj5QQL8FxbwNtPQBY3IZ4qJY9CpOusmdWBYwm1golRVQCmURiAhlnWIQ=="],)
+     def test_record_with_tag(data)
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         record_keys age,parent_id,routing_id,custom_key
+         hash_type #{hash_type}
+         include_tag_in_seed true
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test.fluentd') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+
+     data("md5" => ["md5", "oHo+PoC5I4KC+XCfXvyf9w=="],
+          "sha1" => ["sha1", "50Nwarm2225gLy1ka8d9i+W6cKA="],
+          "sha256" => ["sha256", "ReX1XgizcrHjBc0sQwx9Sjuf2QBFll2njYf4ee+XSIc="],
+          "sha512" => ["sha512", "8bcpZrqNUQIz6opdoVZz0MwxP8r9SCqOEPkWF6xGLlFwPCJVqk2SQp99m8rPufr0xPIgvZyOMejA5slBV9xrdg=="],)
+     def test_record_with_time(data)
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         record_keys age,parent_id,routing_id,custom_key
+         hash_type #{hash_type}
+         include_time_in_seed true
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test.fluentd') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+
+     data("md5" => ["md5", "u7/hr09gDC9CM5DI7tLc2Q=="],
+          "sha1" => ["sha1", "1WgptcTnVSHtTAlNUwNcoiaY3oM="],
+          "sha256" => ["sha256", "1iWZHI19m/A1VH8iFK7H2KFoyLdszpJRiVeKBv1Ndis="],
+          "sha512" => ["sha512", "NM+ui0lUmeDaEJsT7c9EyTc+lQBbRf1x6MQXXYdxp21CX3jZvHy3IT8Xp9ZdIKevZwhoo3Suo/tIBlfyLFXJXw=="],)
+     def test_record_with_tag_and_time
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         record_keys age,parent_id,routing_id,custom_key
+         hash_type #{hash_type}
+         include_tag_in_seed true
+         include_time_in_seed true
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test.fluentd') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+   end
+
+   class UseEntireRecordAsSeedTest < self
+     data("md5" => ["md5", "MuMU0gHOP1cWvvg/J4aEFg=="],
+          "sha1" => ["sha1", "GZ6Iup9Ywyk5spCWtPQbtZnfK0U="],
+          "sha256" => ["sha256", "O4YN0RiXCUAYeaR97UUULRLxgra/R2dvTV47viir5l4="],
+          "sha512" => ["sha512", "FtbwO1xsLUq0KcO0mj0l80rbwFH5rGE3vL+Vgh90+4R/9j+/Ni/ipwhiOoUcetDxj1r5Vf/92B54La+QTu3eMA=="],)
+     def test_record
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         use_entire_record true
+         hash_type #{hash_type}
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test.fluentd') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+
+     data("md5" => ["md5", "GJfpWe8ofiGzn97bc9Gh0Q=="],
+          "sha1" => ["sha1", "AVaK67Tz0bEJ8xNEzjOQ6r9fAu4="],
+          "sha256" => ["sha256", "WIXWAuf/Z94Uw95mudloo2bgjhSsSduQIwkKTQsNFgU="],
+          "sha512" => ["sha512", "yjMGGxy8uc7gCrPgm8W6MzJGLFk0GtUwJ6w/91laf6WNywuvG/7T6kNHLagAV8rSW8xzxmtEfyValBO5scuoKw=="],)
+     def test_record_with_tag
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         use_entire_record true
+         hash_type #{hash_type}
+         include_tag_in_seed true
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test.fluentd') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+
+     data("md5" => ["md5", "5nQSaJ4F1p9rDFign13Lfg=="],
+          "sha1" => ["sha1", "hyo9+0ZFBpizKl2NShs3C8yQcGw="],
+          "sha256" => ["sha256", "romVsZSIksbqYsOSnUzolZQw76ankcy0DgvDZ3CayTo="],
+          "sha512" => ["sha512", "RPU7K2Pt0iVyvV7p5usqcUIIOmfTajD1aa7pkR9qZ89UARH/lpm6ESY9iwuYJj92lxOUuF5OxlEwvV7uXJ07iA=="],)
+     def test_record_with_time
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         use_entire_record true
+         hash_type #{hash_type}
+         include_time_in_seed true
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test.fluentd') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+
+     data("md5" => ["md5", "zGQF35KlMUibJAcgkgQDtw=="],
+          "sha1" => ["sha1", "1x9RZO1xEuWps090qq4DUIsU9x8="],
+          "sha256" => ["sha256", "eulMz0eF56lBEf31aIs0OG2TGCH/aoPfZbRqfEOkAwk="],
+          "sha512" => ["sha512", "mIiYATtpdUFEFCIZg1FdKssIs7oWY0gJjhSSbet0ddUmqB+CiQAcAMTmrXO6AVSH0vsMvao/8vtC8AsIPfF1fA=="],)
+     def test_record_with_tag_and_time
+       hash_type, expected = data
+       d = create_driver(%[
+         use_record_as_seed true
+         use_entire_record true
+         hash_type #{hash_type}
+         include_tag_in_seed true
+         include_time_in_seed true
+       ])
+       time = event_time("2017-10-15 15:00:23.34567890 UTC")
+       d.run(default_tag: 'test.fluentd') do
+         d.feed(time, sample_record.merge("custom_key" => "This is also encoded value."))
+       end
+       assert_equal(expected,
+                    d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+     end
+   end
+ end
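Usage sketch, not part of the package: the options exercised by the tests above map onto a Fluentd filter section along the following lines, assuming the filter is registered as opensearch_genid (as its file name suggests); the tag pattern and key list are illustrative.

<filter app.**>
  @type opensearch_genid
  hash_id_key _hash                              # key that receives the generated id
  use_record_as_seed true
  record_keys age,parent_id,routing_id,custom_key
  hash_type sha256
  include_tag_in_seed true
</filter>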