logstash-input-dynamodb 2.0.0-java → 2.0.1-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,38 @@
+ Gem::Specification.new do |s|
+   s.name = 'logstash-input-dynamodb'
+   s.version = '2.0.1'
+   s.licenses = ['Apache License (2.0)']
+   s.summary = "This input plugin scans a specified DynamoDB table and then reads changes to a DynamoDB table from the associated DynamoDB Stream."
+   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
+   s.authors = ["Amazon"]
+   s.email = 'dynamodb-interest@amazon.com'
+   s.homepage = "https://github.com/logstash-plugins/logstash-input-dynamodb"
+   s.require_paths = ["lib"]
+   s.platform = 'java'
+
+   # Files
+   s.files = `git ls-files`.split($\)
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
+
+   # Gem dependencies
+   s.add_runtime_dependency "logstash-core", '>= 2.0.0', '< 3.0.0'
+   s.add_runtime_dependency "logstash-codec-json"
+   s.add_runtime_dependency "activesupport-json_encoder"
+   # Jar dependencies
+   s.requirements << "jar 'com.amazonaws:amazon-kinesis-client', '1.6.1'"
+   s.requirements << "jar 'log4j:log4j', '1.2.17'"
+   s.requirements << "jar 'com.amazonaws:aws-java-sdk-dynamodb', '1.10.27'"
+   s.requirements << "jar 'com.amazonaws:aws-java-sdk-core', '1.10.27'"
+   s.requirements << "jar 'com.amazonaws:dynamodb-import-export-tool', '1.0.0'"
+   s.requirements << "jar 'commons-logging:commons-logging', '1.1.3'"
+   s.requirements << "jar 'com.amazonaws:dynamodb-streams-kinesis-adapter', '1.0.2'"
+   s.requirements << "jar 'com.google.guava:guava', '15.0'"
+   s.add_runtime_dependency 'jar-dependencies'
+   # Development dependencies
+   s.add_development_dependency "logstash-devutils"
+   s.add_development_dependency "mocha"
+ end
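For orientation, here is a minimal usage sketch. This is an editorial illustration, not part of the released package: the constructor call, the `register` call, and the option names mirror the specs later in this diff, while the table name and endpoint values are placeholders.

# Install the plugin as described in s.description above, e.g.:
#   $LS_HOME/bin/plugin install logstash-input-dynamodb
#
# Then the input can be instantiated the same way the specs below do.
require "logstash/inputs/dynamodb"

# Placeholder values; the option names are the ones exercised by the specs.
input = LogStash::Plugin.lookup("input", "dynamodb").new(
  "table_name"       => "my-table",
  "view_type"        => "new_and_old_images",
  "endpoint"         => "dynamodb.us-west-1.amazonaws.com",
  "streams_endpoint" => "streams.dynamodb.us-west-1.amazonaws.com"
)
input.register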
@@ -0,0 +1,198 @@
+ # encoding: utf-8
+ #
+ #Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ #
+ #Licensed under the Apache License, Version 2.0 (the "License");
+ #you may not use this file except in compliance with the License.
+ #You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ #Unless required by applicable law or agreed to in writing, software
+ #distributed under the License is distributed on an "AS IS" BASIS,
+ #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ #See the License for the specific language governing permissions and
+ #limitations under the License.
+ #
+ require "spec/spec_helper"
+
+ class LogStash::Inputs::TestDynamoDB < LogStash::Inputs::DynamoDB
+   default :codec, 'json'
+
+   private
+   def shutdown_count
+     @shutdown_count ||= 0
+   end
+
+   def queue_event(event, logstash_queue, host)
+     super(event, logstash_queue, host)
+     # Add additional item to plugin's queue to ensure run() flushes queue before shutting down.
+     # Queue the event and then shutdown, otherwise the threads would run forever
+     if shutdown_count == 0
+       @shutdown_count += 1
+       @queue << "additional event stuck in queue during shutdown"
+       raise LogStash::ShutdownSignal
+     end
+   end
+
+   def start_kcl_thread()
+     @queue << "some message from kcl thread calling process"
+   end
+ end
+
+ class TestParser
+
+   def parse_scan(msg)
+     return msg
+   end
+
+   def parse_stream(msg)
+     return msg
+   end
+
+ end
+
+ describe 'inputs/dynamodb' do
+   let (:dynamodb_client) {mock("AmazonDynamoDB::AmazonDynamoDBClient")}
+   let (:dynamodb_streams_client) {mock("AmazonDynamoDB::AmazonDynamoDBStreamsClient")}
+   let (:adapter) {mock("AmazonDynamoDB::AmazonDynamoDBStreamsAdapterClient")}
+   let (:parser) {mock("DynamoDBLogParser")}
+   let (:region_utils) {mock("RegionUtils")}
+
+   def allow_invalid_credentials(stream_status = "ENABLED", error_to_raise = nil)
+     AmazonDynamoDB::AmazonDynamoDBClient.expects(:new).returns(dynamodb_client)
+     AmazonDynamoDB::AmazonDynamoDBStreamsClient.expects(:new).returns(dynamodb_streams_client)
+     AmazonDynamoDB::AmazonDynamoDBStreamsAdapterClient.expects(:new).returns(adapter)
+     Logstash::Inputs::DynamoDB::DynamoDBLogParser.expects(:new).returns(TestParser.new())
+     RegionUtils.expects(:getRegionByEndpoint).with("some endpoint").returns("some region")
+
+     mock_table_description = stub
+     mock_table = stub
+     mock_key_schema = stub
+     mock_iterator = stub
+     mock_describe_stream = stub
+     mock_stream_description = stub
+     unless error_to_raise.nil?
+       dynamodb_client.expects(:describeTable).raises(error_to_raise)
+       return
+     end
+
+     adapter.expects(:setEndpoint).with("some streams endpoint")
+     dynamodb_streams_client.expects(:setEndpoint).with("some streams endpoint")
+     dynamodb_streams_client.expects(:describeStream).returns(mock_describe_stream)
+     mock_describe_stream.expects(:getStreamDescription).returns(mock_stream_description)
+     mock_stream_description.expects(:getStreamStatus).returns(stream_status)
+     mock_stream_description.expects(:getStreamViewType).returns("new_and_old_images")
+     mock_table.expects(:getLatestStreamArn).returns("test streamId")
+     dynamodb_client.expects(:setEndpoint)
+     dynamodb_client.expects(:describeTable).returns(mock_table_description)
+     mock_table_description.expects(:getTable).returns(mock_table)
+     mock_table.expects(:getKeySchema).returns(mock_key_schema)
+     mock_key_schema.expects(:iterator).returns(mock_iterator)
+     mock_iterator.expects(:hasNext).returns(false)
+
+   end
+
+   it "should not allow empty config" do
+     expect {LogStash::Plugin.lookup("input", "dynamodb").new(empty_config)}.to raise_error(LogStash::ConfigurationError)
+   end
+
+   it "should need endpoint" do
+     config = tablename
+     config.delete("endpoint")
+     expect {LogStash::Plugin.lookup("input", "dynamodb").new(config)}.to raise_error(LogStash::ConfigurationError)
+   end
+
+   it "should need table_name config" do
+     config = tablename
+     config.delete("table_name")
+     expect {LogStash::Plugin.lookup("input", "dynamodb").new(config)}.to raise_error(LogStash::ConfigurationError)
+   end
+
+   it "should need view_type config" do
+     config = tablename
+     config.delete("view_type")
+     expect {LogStash::Plugin.lookup("input", "dynamodb").new(config)}.to raise_error(LogStash::ConfigurationError)
+   end
+
+   it "should use default AWS credentials" do
+     input = LogStash::Plugin.lookup("input", "dynamodb").new(tablename)
+     expect(input.build_credentials()).to be_an_instance_of(Java::ComAmazonawsAuth::DefaultAWSCredentialsProviderChain)
+   end
+
+   it "should register correctly" do
+     input = LogStash::Plugin.lookup("input", "dynamodb").new(invalid_aws_credentials_config)
+     allow_invalid_credentials()
+     expect {input.register}.not_to raise_error
+   end
+
+   it "should create new logstash event with metadata and add to queue" do
+     input = LogStash::Plugin.lookup("input", "dynamodb").new(invalid_aws_credentials_config)
+     queue = SizedQueue.new(20)
+     input.queue_event("some message", queue, "some host")
+     expect(queue.size()).to eq(1)
+     event = queue.pop()
+     expect(event["message"]).to eq("some message")
+     expect(event["host"]).to eq("some host")
+   end
+
+   it "should start mock kcl worker thread and receive event from it, then flush additional events stuck in queue before shutting down" do
+     input = LogStash::Inputs::TestDynamoDB.new(invalid_aws_credentials_config.merge({'perform_scan' => false}))
+     allow_invalid_credentials()
+     input.register
+     queue = SizedQueue.new(20)
+     input.run queue
+     expect(queue.size()).to eq(2)
+     event = queue.pop()
+     expect(event["message"]).to eq("some message from kcl thread calling process")
+     event = queue.pop()
+     expect(event["message"]).to eq("additional event stuck in queue during shutdown")
+   end
+
+   it "should raise error since no active streams" do
+     input = LogStash::Plugin.lookup("input", "dynamodb").new(invalid_aws_credentials_config)
+     allow_invalid_credentials("DISABLED")
+     expect {input.register}.to raise_error(LogStash::PluginLoadingError, "No streams are enabled")
+   end
+
+   it "should handle error for nonexistent table" do
+     input = LogStash::Plugin.lookup("input", "dynamodb").new(invalid_aws_credentials_config)
+     allow_invalid_credentials("ENABLED", Java::ComAmazonawsServicesDynamodbv2Model::ResourceNotFoundException.new("table does not exist"))
+     expect {input.register}.to raise_error(LogStash::PluginLoadingError)
+   end
+
+   it "should allow cloudwatch metrics when specified by user" do
+     input = LogStash::Inputs::TestDynamoDB.new(invalid_aws_credentials_config.merge({"publish_metrics" => true}))
+     allow_invalid_credentials()
+     cloudwatch_mock = mock("Java::ComAmazonawsServicesCloudwatch::AmazonCloudWatchClient")
+     Java::ComAmazonawsServicesCloudwatch::AmazonCloudWatchClient.expects(:new).returns(cloudwatch_mock)
+
+     input.register
+   end
+
+   it "should throw error trying to perform scan with old images" do
+     input = LogStash::Inputs::TestDynamoDB.new(invalid_aws_credentials_config.merge({"view_type" => LogStash::Inputs::DynamoDB::VT_OLD_IMAGE, \
+       "perform_scan" => true}))
+     expect {input.register}.to raise_error(LogStash::ConfigurationError)
+   end
+
+   it "should throw error when view type all images and dynamodb format" do
+     input = LogStash::Inputs::TestDynamoDB.new(invalid_aws_credentials_config.merge({"view_type" => LogStash::Inputs::DynamoDB::VT_ALL_IMAGES, \
+       "log_format" => LogStash::Inputs::DynamoDB::LF_DYNAMODB}))
+     expect {input.register}.to raise_error(LogStash::ConfigurationError)
+   end
+
+   it "should throw error when view type all images and json_drop_binary format" do
+     input = LogStash::Inputs::TestDynamoDB.new(invalid_aws_credentials_config.merge({"view_type" => LogStash::Inputs::DynamoDB::VT_ALL_IMAGES, \
+       "log_format" => LogStash::Inputs::DynamoDB::LF_JSON_NO_BIN}))
+     expect {input.register}.to raise_error(LogStash::ConfigurationError)
+   end
+
+   it "should throw error when view type all images and json_binary_as_text format" do
+     input = LogStash::Inputs::TestDynamoDB.new(invalid_aws_credentials_config.merge({"view_type" => LogStash::Inputs::DynamoDB::VT_ALL_IMAGES, \
+       "log_format" => LogStash::Inputs::DynamoDB::LF_JSON_BIN_AS_TEXT}))
+     expect {input.register}.to raise_error(LogStash::ConfigurationError)
+   end
+
+
+ end
@@ -0,0 +1,63 @@
+ # encoding: utf-8
+ #
+ #Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ #
+ #Licensed under the Apache License, Version 2.0 (the "License");
+ #you may not use this file except in compliance with the License.
+ #You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ #Unless required by applicable law or agreed to in writing, software
+ #distributed under the License is distributed on an "AS IS" BASIS,
+ #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ #See the License for the specific language governing permissions and
+ #limitations under the License.
+ #
+ require "spec/spec_helper"
+
+ class Logstash::Inputs::DynamoDB::DynamoDBLogParserTest < Logstash::Inputs::DynamoDB::DynamoDBLogParser
+
+   private
+   def calculate_key_size_in_bytes(record)
+     return 10
+   end
+
+ end
+
+ describe "inputs/LogParser" do
+   let (:object_mapper) {mock("ObjectMapper")}
+
+   before(:each) do
+     Java::ComFasterxmlJacksonDatabind::ObjectMapper.expects(:new).returns(object_mapper)
+     object_mapper.expects(:setSerializationInclusion)
+     object_mapper.expects(:addMixInAnnotations)
+   end
+
+   def expect_parse_stream()
+     object_mapper.expects(:writeValueAsString).with(sample_stream_result).returns(sample_stream_result)
+     JSON.expects(:parse).with(sample_stream_result).returns(sample_stream_result)
+   end
+
+   def expect_parse_scan()
+     object_mapper.expects(:writeValueAsString).with(sample_scan_result).returns(sample_scan_result)
+     JSON.expects(:parse).with(sample_scan_result).returns(sample_scan_result)
+   end
+
+   it "should parse a scan and parse a stream the same way" do
+     expect_parse_stream
+     expect_parse_scan
+     parser = Logstash::Inputs::DynamoDB::DynamoDBLogParserTest.new(LogStash::Inputs::DynamoDB::VT_ALL_IMAGES, LogStash::Inputs::DynamoDB::LF_PLAIN, key_schema, "us-west-1")
+     scan_after_parse = parser.parse_scan(sample_scan_result, 38)
+     stream_after_parse = parser.parse_stream(sample_stream_result)
+     expect(scan_after_parse).to eq(stream_after_parse)
+   end
+
+   it "should drop binary values when parsing into a json with the correct configuration" do
+     expect_parse_scan
+     parser = Logstash::Inputs::DynamoDB::DynamoDBLogParserTest.new(LogStash::Inputs::DynamoDB::VT_NEW_IMAGE, LogStash::Inputs::DynamoDB::LF_JSON_NO_BIN, key_schema, "us-west-1")
+     result = parser.parse_scan(sample_scan_result, 38)
+     expect(result).to eq({"TBCZDPHPXUTOTYGP" => "sampleString"}.to_json)
+   end
+
+ end
@@ -0,0 +1,70 @@
1
+ # encoding: utf-8
2
+ #
3
+ #Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
4
+ #
5
+ #Licensed under the Apache License, Version 2.0 (the "License");
6
+ #you may not use this file except in compliance with the License.
7
+ #You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ #Unless required by applicable law or agreed to in writing, software
12
+ #distributed under the License is distributed on an "AS IS" BASIS,
13
+ #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ #See the License for the specific language governing permissions and
15
+ #limitations under the License.
16
+ #
17
+ require "spec/spec_helper"
18
+
19
+ describe 'inputs/LogStashRecordProcessor' do
20
+ before(:each) do
21
+ @queue = SizedQueue.new(20)
22
+ @processor = Logstash::Inputs::DynamoDB::LogStashRecordProcessor.new(@queue)
23
+ end
24
+
25
+ it "should call setShardId when being called with a String" do
26
+ processor_with_shard = Logstash::Inputs::DynamoDB::LogStashRecordProcessor.new("test shardId")
27
+ expect(processor_with_shard.shard_id).to eq("test shardId")
28
+ end
29
+
30
+ it "should not call setShardId when being called with a queue" do
31
+ expect(@processor.queue).to eq(@queue)
32
+ expect(@processor.shard_id).to be_nil
33
+ end
34
+
35
+ it "should checkpoint when shutdown is called with reason TERMINATE" do
36
+ checkpointer = mock("checkpointer")
37
+ checkpointer.expects(:checkpoint).once
38
+ @processor.shutdown(checkpointer, ShutdownReason::TERMINATE)
39
+ end
40
+
41
+ it "should not checkpoint when shutdown is called with reason ZOMBIE" do
42
+ checkpointer = mock("checkpointer")
43
+ checkpointer.expects(:checkpoint).never
44
+ @processor.shutdown(checkpointer, ShutdownReason::ZOMBIE)
45
+ end
46
+
47
+ it "should raise error when shutdown is called with unknown reason" do
48
+ expect {@processor.shutdown("some checkpointer", "unknown reason")}.to raise_error(RuntimeError)
49
+ end
50
+
51
+ it "should translate each record into String, push them onto queue, and then checkpoint when process_records is called" do
52
+ checkpointer = mock("checkpointer")
53
+ checkpointer.expects(:checkpoint).once
54
+
55
+ records = [{"a records data" => "a records value"}, {"another records data" => "another records value"}]
56
+ @processor.process_records(records, checkpointer)
57
+ end
58
+
59
+ end
60
+
61
+ describe 'inputs/LogStashRecordProcessorFactory' do
62
+
63
+ it "should create a new factory correctly and create a new LogStashRecordProcessor when called upon" do
64
+ queue = SizedQueue.new(20)
65
+ factory = Logstash::Inputs::DynamoDB::LogStashRecordProcessorFactory.new(queue)
66
+ processor = factory.create_processor
67
+ expect(processor).to be_an_instance_of(Logstash::Inputs::DynamoDB::LogStashRecordProcessor)
68
+ end
69
+
70
+ end
@@ -0,0 +1,134 @@
1
+ # This file was generated by the `rspec --init` command. Conventionally, all
2
+ # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
3
+ # The generated `.rspec` file contains `--require spec_helper` which will cause
4
+ # this file to always be loaded, without a need to explicitly require it in any
5
+ # files.
6
+ #
7
+ # Given that it is always loaded, you are encouraged to keep this file as
8
+ # light-weight as possible. Requiring heavyweight dependencies from this file
9
+ # will add to the boot time of your test suite on EVERY test run, even for an
10
+ # individual file that may not need all of that loaded. Instead, consider making
11
+ # a separate helper file that requires the additional dependencies and performs
12
+ # the additional setup, and require it from the spec files that actually need
13
+ # it.
14
+ #
15
+ # The `.rspec` file also contains a few flags that are not defaults but that
16
+ # users commonly want.
17
+ #
18
+ # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
19
+ RSpec.configure do |config|
20
+ # rspec-expectations config goes here. You can use an alternate
21
+ # assertion/expectation library such as wrong or the stdlib/minitest
22
+ # assertions if you prefer.
23
+ config.expect_with :rspec do |expectations|
24
+ # This option will default to `true` in RSpec 4. It makes the `description`
25
+ # and `failure_message` of custom matchers include text for helper methods
26
+ # defined using `chain`, e.g.:
27
+ # be_bigger_than(2).and_smaller_than(4).description
28
+ # # => "be bigger than 2 and smaller than 4"
29
+ # ...rather than:
30
+ # # => "be bigger than 2"
31
+ expectations.include_chain_clauses_in_custom_matcher_descriptions = true
32
+ end
33
+
34
+ # rspec-mocks config goes here. You can use an alternate test double
35
+ # library (such as bogus or mocha) by changing the `mock_with` option here.
36
+ config.mock_with :rspec do |mocks|
37
+ # Prevents you from mocking or stubbing a method that does not exist on
38
+ # a real object. This is generally recommended, and will default to
39
+ # `true` in RSpec 4.
40
+ mocks.verify_partial_doubles = true
41
+ end
42
+
43
+ # The settings below are suggested to provide a good initial experience
44
+ # with RSpec, but feel free to customize to your heart's content.
45
+ =begin
46
+ # These two settings work together to allow you to limit a spec run
47
+ # to individual examples or groups you care about by tagging them with
48
+ # `:focus` metadata. When nothing is tagged with `:focus`, all examples
49
+ # get run.
50
+ config.filter_run :focus
51
+ config.run_all_when_everything_filtered = true
52
+
53
+ # Allows RSpec to persist some state between runs in order to support
54
+ # the `--only-failures` and `--next-failure` CLI options. We recommend
55
+ # you configure your source control system to ignore this file.
56
+ config.example_status_persistence_file_path = "spec/examples.txt"
57
+
58
+ # Limits the available syntax to the non-monkey patched syntax that is
59
+ # recommended. For more details, see:
60
+ # - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
61
+ # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
62
+ # - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
63
+ config.disable_monkey_patching!
64
+
65
+ # This setting enables warnings. It's recommended, but in some cases may
66
+ # be too noisy due to issues in dependencies.
67
+ config.warnings = true
68
+
69
+ # Many RSpec users commonly either run the entire suite or an individual
70
+ # file, and it's useful to allow more verbose output when running an
71
+ # individual spec file.
72
+ if config.files_to_run.one?
73
+ # Use the documentation formatter for detailed output,
74
+ # unless a formatter has already been configured
75
+ # (e.g. via a command-line flag).
76
+ config.default_formatter = 'doc'
77
+ end
78
+
79
+ # Print the 10 slowest examples and example groups at the
80
+ # end of the spec run, to help surface which specs are running
81
+ # particularly slow.
82
+ config.profile_examples = 10
83
+
84
+ # Run specs in random order to surface order dependencies. If you find an
85
+ # order dependency and want to debug it, you can fix the order by providing
86
+ # the seed, which is printed after each run.
87
+ # --seed 1234
88
+ config.order = :random
89
+
90
+ # Seed global randomization in this process using the `--seed` CLI option.
91
+ # Setting this allows you to use `--seed` to deterministically reproduce
92
+ # test failures related to randomization by passing the same `--seed` value
93
+ # as the one that triggered the failure.
94
+ Kernel.srand config.seed
95
+ =end
96
+ end
97
+
98
+ require "logstash/devutils/rspec/spec_helper"
99
+ require "logstash/inputs/DynamoDBLogParser"
100
+ require "logstash/inputs/dynamodb"
101
+ require "rspec/expectations"
102
+ require "rspec/mocks"
103
+ require "mocha"
104
+ require "java"
105
+
106
+ RSpec.configure do |config|
107
+ config.mock_with :mocha
108
+ end
109
+
110
+ def empty_config
111
+ {}
112
+ end
113
+
114
+ def tablename
115
+ {'table_name' => 'test tablename', 'view_type' => "new_and_old_images", "endpoint" => "some endpoint"}
116
+ end
117
+ def invalid_aws_credentials_config
118
+ {'table_name' => 'test tablename', "endpoint" => "some endpoint", 'aws_access_key_id' => 'invalid', 'aws_secret_access_key' => 'invalid_also', 'view_type' => "new_and_old_images", "streams_endpoint" => "some streams endpoint"}
119
+ end
120
+ def invalid_aws_credentials_config_no_endpoints
121
+ {'table_name' => 'test tablename', 'aws_access_key_id' => 'invalid', 'aws_secret_access_key' => 'invalid_also', 'view_type' => "new_and_old_images"}
122
+ end
123
+ def key_schema
124
+ ["TBCZDPHPXUTOTYGP", "some bin key"]
125
+ end
126
+ def sample_scan_result
127
+ {"TBCZDPHPXUTOTYGP" => {"S" => "sampleString"}, "some bin key" => {"B" => "actualbinval"}}
128
+ end
129
+ def sample_stream_result
130
+ {"internalObject" => {"eventID" => "0","eventName" => "INSERT","eventVersion" => "1.0", \
131
+ "eventSource" => "aws:dynamodb","awsRegion" => "us-west-1","dynamodb" => {"keys" => {"TBCZDPHPXUTOTYGP" => {"S" => "sampleString"}, \
132
+ "some bin key" => {"B" => "actualbinval"}}, "newImage" => {"TBCZDPHPXUTOTYGP" => {"S" => "sampleString"}, \
133
+ "some bin key" => {"B" => "actualbinval"}},"sequenceNumber" => "0","sizeBytes" => 48,"streamViewType" => LogStash::Inputs::DynamoDB::VT_ALL_IMAGES.upcase}}}
134
+ end