aw_datapipe 0.1.0
- checksums.yaml +7 -0
- data/.gitignore +9 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +20 -0
- data/README.md +59 -0
- data/Rakefile +10 -0
- data/aw_datapipe.gemspec +32 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/lib/aw_datapipe.rb +10 -0
- data/lib/aw_datapipe/object_hash.rb +25 -0
- data/lib/aw_datapipe/parameter_metadata.rb +15 -0
- data/lib/aw_datapipe/pipeline.rb +89 -0
- data/lib/aw_datapipe/pipeline_object.rb +63 -0
- data/lib/aw_datapipe/pipeline_serializer.rb +124 -0
- data/lib/aw_datapipe/session.rb +51 -0
- data/lib/aw_datapipe/version.rb +3 -0
- metadata +131 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: fe971d0b5fa7e5a8558d43e7e0940e634d3bbfeb
  data.tar.gz: 9ccb16f29ef04f12ff5fdbc23d042e86d762393a
SHA512:
  metadata.gz: c26e3f06941dabdda94fa17d8dabf0edd623820b7ce8579efd7f78f51ef32c9e2f085a68c7fd022b41a11431d7ca8fc026a2838e28ec92711bcda06f85293c38
  data.tar.gz: d7758fde384e2d782805b0b9b0c3fb2e0c3094f31a6d4d2863ac8e49664f211567054fd35e76cb260c68e139998dbdfef9f12ab69438b6180f345aface21d2f5
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,20 @@
Copyright (c) 2017 Piers Chambers

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,59 @@
# AW Datapipe

AW Datapipe is an unofficial, domain-specific Ruby wrapper for the
[AWS SDK](http://www.rubydoc.info/github/aws/aws-sdk-ruby) Data Pipeline API.

The primary goal is to support Ruby scripts for creating and updating
pipeline definitions.

## Installation

Add this line to your application's Gemfile:

```ruby
gem 'aw_datapipe'
```

And then execute:

    $ bundle

Or install it yourself as:

    $ gem install aw_datapipe

## Usage

Configure credentials for the AWS SDK:

```sh
export AWS_ACCESS_KEY_ID=AKIA****************
export AWS_SECRET_ACCESS_KEY=********************************
```
Use bin/console to download a pipeline definition as Ruby instead of JSON:
```ruby
pipelines = AwDatapipe::Session.new
pipelines.download_definition 'df-***************', 'tmp/pipeline-definition.rb'
```

The generated script can be checked into version control, modified, and executed
to update the pipeline definition (a sketch of such a script follows):
```sh
bundle exec ruby tmp/pipeline-definition.rb
```
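For orientation, here is an abbreviated sketch of the kind of script `download_definition` writes. All object names and field values below are invented for illustration; a real download lists every member of every object in the live pipeline:

```ruby
# Generated by aw_datapipe download_definition of df-EXAMPLE (illustrative)
require 'aw_datapipe'

my_schedule = AwDatapipe::Schedule.build(
  id: :my_schedule,
  name: 'Every day',
  period: '1 day',
  start_date_time: '2017-05-01T00:00:00')

ec2_instance = AwDatapipe::Ec2Resource.build(
  id: :ec2_instance,
  instance_type: 't1.micro',
  name: 'Ec2Instance',
  terminate_after: '2 Hours')

input_node = AwDatapipe::S3DataNode.build(
  directory_path: 's3://example-bucket/input/',
  id: :input_node,
  name: 'InputData')

output_node = AwDatapipe::S3DataNode.build(
  directory_path: 's3://example-bucket/output/',
  id: :output_node,
  name: 'OutputData')

default = AwDatapipe::Configuration.build(
  failure_and_rerun_mode: 'CASCADE',
  id: :default,
  name: 'Default',
  pipeline_log_uri: 's3://example-bucket/logs/',
  resource_role: 'DataPipelineDefaultResourceRole',
  role: 'DataPipelineDefaultRole',
  schedule: my_schedule,
  schedule_type: 'cron')

activities = [
  AwDatapipe::CopyActivity.build(
    id: :copy_activity,
    input: input_node,
    name: 'CopyData',
    output: output_node,
    runs_on: ec2_instance)
]

parameter_metadata = {}
parameter_values = {}

pipelines = AwDatapipe::Session.new
pipeline = AwDatapipe::Pipeline.build(default, activities, parameter_metadata, parameter_values)
pipeline.id = "df-EXAMPLE"
pipelines.save(pipeline)
```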
## Development

A live AWS account with a sample pipeline is required to run the tests.

After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.

To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).

## Contributing

Bug reports and pull requests are welcome on GitHub at https://github.com/varyonic/aw_datapipe.

## License

The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
data/Rakefile
ADDED
data/aw_datapipe.gemspec
ADDED
@@ -0,0 +1,32 @@
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'aw_datapipe/version'

Gem::Specification.new do |spec|
  spec.name = "aw_datapipe"
  spec.version = AwDatapipe::VERSION
  spec.authors = ["Piers Chambers"]
  spec.email = ["piers@varyonic.com"]

  spec.summary = "Unofficial ruby wrapper for the AWS SDK Data Pipeline API."
  spec.description = "Unofficial domain specific ruby wrapper for the AWS SDK Data Pipeline API."
  spec.homepage = "http://github.com/varyonic/aw_datapipe"
  spec.date = "2017-05-02"
  spec.license = "MIT"

  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end

  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_dependency("activesupport")
  spec.add_dependency("aws-sdk", ['~> 2'])

  spec.add_development_dependency "bundler", "~> 1.14"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.0"
end
data/bin/console
ADDED
@@ -0,0 +1,14 @@
#!/usr/bin/env ruby

require "bundler/setup"
require "aw_datapipe"

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require "irb"
IRB.start(__FILE__)
data/bin/setup
ADDED
data/lib/aw_datapipe.rb
ADDED
@@ -0,0 +1,10 @@
require 'active_support/inflector' # String#underscore
require 'aws-sdk'

require 'aw_datapipe/parameter_metadata'
require 'aw_datapipe/pipeline'
require 'aw_datapipe/pipeline_object'
require 'aw_datapipe/pipeline_serializer'
require 'aw_datapipe/object_hash'
require 'aw_datapipe/session'
require 'aw_datapipe/version'
data/lib/aw_datapipe/object_hash.rb
ADDED
@@ -0,0 +1,25 @@
module AwDatapipe
  # A symbol table implemented as a hash of objects keyed by their ids.
  class ObjectHash < Hash
    def initialize(*objects)
      super()
      self.append(*objects)
    end

    # Adds PipelineObjects to the symbol table.
    def append(*objects)
      objects.each { |object| self[object.id] = object }
      self
    end
    alias_method :<<, :append

    # Adds PipelineObjects along with their dependencies, dependencies first.
    def append_with_dependents(*objects)
      objects.each do |object|
        object.dependencies.each { |dep| self.append dep }
        self.append object
      end
      self
    end
  end
end
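As an aside, here is a small sketch of how `ObjectHash#append_with_dependents` orders entries; the objects are hypothetical:

```ruby
require 'aw_datapipe'

node = AwDatapipe::S3DataNode.build(id: :input_node, name: 'InputData',
                                    directory_path: 's3://example-bucket/in/')
resource = AwDatapipe::Ec2Resource.build(id: :ec2_instance, name: 'Ec2Instance')
activity = AwDatapipe::CopyActivity.build(id: :copy_activity, name: 'CopyData',
                                          input: node, runs_on: resource)

hash = AwDatapipe::ObjectHash.new
hash.append_with_dependents(activity)
hash.keys  # => [:input_node, :ec2_instance, :copy_activity] -- dependencies inserted before the activity
```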
data/lib/aw_datapipe/parameter_metadata.rb
ADDED
@@ -0,0 +1,15 @@
module AwDatapipe
  class ParameterMetadata < Struct
    def source
      "{ " << members.map do |member|
        member_source(member)
      end.join(", ") << " }"
    end

    def member_source(member)
      value = send member
      value = ?' << value.gsub("'", "\\\\'") << ?' if value.is_a?(String)
      "#{member}: #{value}"
    end
  end
end
data/lib/aw_datapipe/pipeline.rb
ADDED
@@ -0,0 +1,89 @@
module AwDatapipe

  class Pipeline
    attr_accessor :id # AWS pipeline id
    attr_accessor :uuid # Unique id
    attr_reader :objects # ObjectHash[:id => PipelineObject]
    attr_reader :parameter_metadata # Hash['parameterName' => Hash[type:|default:|description:|...]]
    attr_reader :parameter_values # Hash['parameterName' => "value"]

    # objects [Array]
    def initialize(objects, parameter_metadata, parameter_values)
      objects.each { |object| object.pipeline = self }
      @objects ||= ObjectHash.new(*objects)
      @parameter_metadata, @parameter_values = parameter_metadata, parameter_values
    end

    def self.build(config, activities, parameter_metadata, parameter_values)
      new([], parameter_metadata, parameter_values).tap { |p| p.objects.append_with_dependents(config, *activities) }
    end

    def configuration
      objects.fetch(:default)
    end

    def referenced_object_ids
      referenced_objects.map(&:id) << :default
    end

    # Collect dependencies for all objects, removing duplicates.
    # @return [Array] referenced objects, with dependees before dependents.
    def referenced_objects
      objects.values.map(&:dependencies).flatten.uniq
    end

    def source
      [
        header_source,
        objects_source,
        parameter_metadata_source,
        parameter_values_source,
        footer_source
      ].join("\n")
    end

    def write_source(pathname)
      File.write(pathname, source)
    end

    protected

    def footer_source
      <<-EOF
pipelines = AwDatapipe::Session.new
pipeline = AwDatapipe::Pipeline.build(default, activities, parameter_metadata, parameter_values)
pipeline.id = "#{id}"
pipelines.save(pipeline)
      EOF
    end

    def header_source
      <<-EOF
# Generated by aw_datapipe download_definition of #{id}
require 'aw_datapipe'
      EOF
    end

    def objects_source
      object_ids = objects.keys
      unreferenced_object_ids = object_ids - referenced_object_ids

      s = referenced_object_ids.map { |id| "#{id} = #{objects[id].source}" }.join("\n\n")
      s << "\n\nactivities = [\n"
      s << unreferenced_object_ids.map { |id| " #{objects[id].source(2)}" }.join(",\n")
      s << "\n]"
    end

    def parameter_values_source
      "parameter_values = {\n " << parameter_values.sort.map do |key, value|
        "\"#{key}\" => #{value.inspect}"
      end.join(",\n ") << "\n}\n"
    end

    def parameter_metadata_source
      "parameter_metadata = {\n " << parameter_metadata.sort.map do |key, value|
        "\"#{key}\" => #{value.source}"
      end.join(",\n ") << "\n}\n"
    end
  end
end
data/lib/aw_datapipe/pipeline_object.rb
ADDED
@@ -0,0 +1,63 @@
module AwDatapipe
  class PipelineObject < Struct
    attr_accessor :pipeline

    def self.build(params)
      new.tap do |struct|
        params.each_pair { |k, v| struct.send "#{k}=", v }
      end
    end

    # Iterates through struct members, recursively collecting any PipelineObjects.
    # Recursion ensures dependencies sorted before dependents.
    def dependencies
      (members - [:id]).each_with_object([]) do |attr_name, depends|
        value = send(attr_name)
        value = pipeline.objects.fetch(value) if value.is_a?(Symbol)
        depends << value.dependencies << value if value.is_a?(PipelineObject)
      end.flatten
    end

    def demodulized_class_name
      self.class.name.split('::').last
    end

    def to_hash
      Hash[each_pair.to_a]
    end

    def source(indent_level = 1)
      "#{self.class.name}.build(\n" << indent(indent_level) << members.map do |member|
        member_source(member)
      end.join(",\n" << indent(indent_level)) << ")"
    end

    protected

    def indent(indent_level)
      " " * 2 * indent_level
    end

    def member_source(member)
      value = send member
      value = ?' << value.gsub("'", "\\\\'") << ?' if value.is_a?(String)
      value = ":#{value}" if member == :id
      "#{member}: #{value}"
    end
  end

  Configuration = PipelineObject.new(:failure_and_rerun_mode, :id, :name, :pipeline_log_uri, :resource_role, :role, :schedule, :schedule_type)
  Schedule = PipelineObject.new(:id, :name, :period, :start_date_time)

  Ec2Resource = PipelineObject.new(:action_on_task_failure, :id, :instance_type, :name, :security_group_ids, :subnet_id, :terminate_after)
  S3DataNode = PipelineObject.new(:directory_path, :id, :name)

  JdbcDatabase = PipelineObject.new(:_password, :connection_string, :id, :jdbc_driver_class, :name, :username)
  SqlDataNode = PipelineObject.new(:database, :id, :name, :select_query, :table)
  CopyActivity = PipelineObject.new(:id, :input, :name, :output, :runs_on)

  RedshiftDatabase = PipelineObject.new(:_password, :connection_string, :database_name, :id, :name, :username)
  RedshiftDataNode = PipelineObject.new(:create_table_sql, :database, :id, :name, :primary_keys, :schema_name, :table_name)
  RedshiftCopyActivity = PipelineObject.new(:id, :input, :insert_mode, :name, :output, :runs_on)

end
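To illustrate how these struct subclasses behave (all values here are hypothetical): `build` sets only the members it is given, and `source` renders the object back as the Ruby call that would recreate it:

```ruby
require 'aw_datapipe'

node = AwDatapipe::S3DataNode.build(
  directory_path: 's3://example-bucket/input/',
  id: :input_node,
  name: 'InputData')

node.to_hash
# => { directory_path: "s3://example-bucket/input/", id: :input_node, name: "InputData" }

puts node.source
# AwDatapipe::S3DataNode.build(
#   directory_path: 's3://example-bucket/input/',
#   id: :input_node,
#   name: 'InputData')
```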
data/lib/aw_datapipe/pipeline_serializer.rb
ADDED
@@ -0,0 +1,124 @@
module AwDatapipe
  # Converts a pipeline into a format that can be submitted to the AWS client SDK.
  class PipelineSerializer
    def marshal(pipeline)
      {
        pipeline_id: pipeline.id,
        pipeline_objects: marshal_pipeline_objects(pipeline),
        parameter_objects: marshal_parameter_objects(pipeline.parameter_metadata),
        parameter_values: marshal_parameter_values(pipeline.parameter_values)
      }
    end

    def unmarshal(aws_definition)
      # pipeline.aws_definition = aws_definition # for troubleshooting
      objects = unmarshal_pipeline_objects(aws_definition)
      parameter_metadata = unmarshal_parameter_objects(aws_definition)
      parameter_values = unmarshal_parameter_values(aws_definition)

      Pipeline.new(objects, parameter_metadata, parameter_values)
    end

    protected

    def marshal_pipeline_objects(pipeline)
      # marshal referenced objects before unreferenced.
      referenced_object_ids = pipeline.referenced_object_ids
      unreferenced_object_ids = pipeline.objects.keys - referenced_object_ids
      ids = (referenced_object_ids + unreferenced_object_ids)

      ids.each_with_object([]) do |id, out|
        out << marshal_pipeline_object(pipeline.objects[id])
      end
    end

    def marshal_pipeline_object(pipeline_object)
      type = pipeline_object.demodulized_class_name
      hash = pipeline_object.to_hash
      id = hash.delete(:id)
      name = hash.delete(:name)
      fields = hash_to_fields(hash)
      fields << { key: 'type', string_value: type } unless type == 'Configuration'
      Hash[id: camelize(id), name: name, fields: fields]
    end

    # @return [Array<PipelineObject>] one subclass instance per AWS pipeline object.
    def unmarshal_pipeline_objects(aws_definition)
      aws_definition.pipeline_objects.map do |aws_struct|
        unmarshal_pipeline_object(aws_struct)
      end
    end

    # @return [PipelineObject] subclass instance.
    def unmarshal_pipeline_object(aws_struct)
      attributes = fields_to_hash(aws_struct.fields)
      type = attributes.delete(:type) || 'Configuration'
      attributes.merge!(id: symbolize(aws_struct.id), name: aws_struct.name)

      klass = AwDatapipe.const_defined?(type, false) ?
        AwDatapipe.const_get(type, false) :
        AwDatapipe.const_set(type, PipelineObject.new(*attributes.keys.sort))

      klass.new(*attributes.sort.map(&:last)) # pass values sorted by keys
    end

    def fields_to_hash(fields)
      fields.each_with_object({}) do |field, hash|
        hash[symbolize field.key] = field.string_value || field.ref_value.underscore.to_sym
      end
    end

    def hash_to_fields(hash)
      hash.keys.map do |key|
        PipelineObject === hash[key] ?
          { key: camelize(key, :lower), ref_value: camelize(hash[key].id) } :
          { key: camelize(key, :lower), string_value: hash[key] }
      end
    end

    # Convert string to a rubyish variable name.
    def symbolize(key)
      key.underscore.gsub('*','_').to_sym
    end

    # Convert symbol back to AWSish name.
    def camelize(key, term = :upper)
      key.to_s.sub(/^\_/, '*').camelize(term)
    end

    def marshal_parameter_objects(parameter_metadata)
      parameter_metadata.map { |key, hash| marshal_parameter_object(key, hash) }
    end

    def marshal_parameter_object(key, hash)
      out = []
      hash.each_pair do |k, v|
        out << { key: k, string_value: v }
      end
      { id: key, attributes: out }
    end

    def unmarshal_parameter_objects(aws_definition)
      aws_definition.parameter_objects.each_with_object({}) do |object, hash|
        klass = ParameterMetadata.new(*object.attributes.map(&:key).map(&:to_sym))
        hash[object.id] = object.attributes.each_with_object(klass.new) do |attribute, struct|
          struct.send "#{attribute.key}=", attribute.string_value
        end
      end
    end

    def marshal_parameter_values(parameter_values)
      out = []
      parameter_values.each_pair do |id, value|
        out << { id: id, string_value: value }
      end
      out
    end

    def unmarshal_parameter_values(aws_definition)
      aws_definition.parameter_values.each_with_object({}) do |value, hash|
        hash[value.id] = value.string_value
      end
    end
  end
end
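For illustration, `PipelineSerializer#marshal` produces a request hash of roughly the following shape, which `Session#save` passes to `Aws::DataPipeline::Client#put_pipeline_definition`. Every id and value below is made up:

```ruby
{
  pipeline_id: 'df-EXAMPLE',
  pipeline_objects: [
    { id: 'Default', name: 'Default',
      fields: [
        { key: 'scheduleType', string_value: 'cron' },
        { key: 'schedule', ref_value: 'MySchedule' }    # references another object by id
      ] },
    { id: 'CopyActivity', name: 'CopyData',
      fields: [
        { key: 'type', string_value: 'CopyActivity' },  # 'type' is added for everything except Configuration
        { key: 'runsOn', ref_value: 'Ec2Instance' }
      ] }
  ],
  parameter_objects: [
    { id: 'myS3LogUri', attributes: [{ key: 'type', string_value: 'AWS::S3::ObjectKey' }] }
  ],
  parameter_values: [
    { id: 'myS3LogUri', string_value: 's3://example-bucket/logs/' }
  ]
}
```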
data/lib/aw_datapipe/session.rb
ADDED
@@ -0,0 +1,51 @@
require 'securerandom'

module AwDatapipe
  class Session
    def aws
      @aws ||= Aws::DataPipeline::Client.new
    end

    def download_definition(id, dir)
      fetch(id).write_source(dir)
    end

    # name [String] required
    # description [String] (optional)
    # unique_id [String] (default: uuid)
    def create(name, description, unique_id = SecureRandom.uuid)
      resp = aws.create_pipeline(name: name, unique_id: unique_id)
      resp.pipeline_id
    end

    def keys
      @keys ||= begin
        resp = aws.list_pipelines
        id_names = resp.pipeline_id_list
        id_names.map(&:id)
      end
    end

    def fetch(key)
      resp = aws.get_pipeline_definition(pipeline_id: key)
      serializer.unmarshal(resp).tap { |p| p.id = key }
    end

    def serializer
      @serializer = PipelineSerializer.new
    end

    def save(pipeline)
      resp = aws.put_pipeline_definition(serializer.marshal(pipeline))
      if resp.errored
        resp.validation_errors.each do |error|
          puts "Error in #{error.id}: #{error.errors.inspect}"
        end
      end
      resp.validation_warnings.each do |warning|
        puts "Warning in #{warning.id}: #{warning.warnings.inspect}"
      end
      !resp.errored
    end
  end
end
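Finally, a hedged sketch of driving `Session` directly from `bin/console`; the pipeline id is hypothetical, and AWS credentials must already be configured as described in the README:

```ruby
require 'aw_datapipe'

session = AwDatapipe::Session.new

session.keys                            # => ["df-...", ...] ids of existing pipelines
pipeline = session.fetch('df-EXAMPLE')  # unmarshal the AWS definition into a Pipeline
pipeline.write_source('tmp/pipeline-definition.rb')

# Re-upload the same definition (running the edited script is the usual workflow):
session.save(pipeline)                  # => true, or false with validation errors printed
```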
metadata
ADDED
@@ -0,0 +1,131 @@
--- !ruby/object:Gem::Specification
name: aw_datapipe
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Piers Chambers
autorequire:
bindir: exe
cert_chain: []
date: 2017-05-02 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: activesupport
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: aws-sdk
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2'
- !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.14'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.14'
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.0'
- !ruby/object:Gem::Dependency
  name: minitest
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.0'
description: Unofficial domain specific ruby wrapper for the AWS SDK Data Pipeline
  API.
email:
- piers@varyonic.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- ".gitignore"
- Gemfile
- LICENSE.txt
- README.md
- Rakefile
- aw_datapipe.gemspec
- bin/console
- bin/setup
- lib/aw_datapipe.rb
- lib/aw_datapipe/object_hash.rb
- lib/aw_datapipe/parameter_metadata.rb
- lib/aw_datapipe/pipeline.rb
- lib/aw_datapipe/pipeline_object.rb
- lib/aw_datapipe/pipeline_serializer.rb
- lib/aw_datapipe/session.rb
- lib/aw_datapipe/version.rb
homepage: http://github.com/varyonic/aw_datapipe
licenses:
- MIT
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.6.11
signing_key:
specification_version: 4
summary: Unofficial ruby wrapper for the AWS SDK Data Pipeline API.
test_files: []