pipedream 0.1.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.cody/buildspec.yml +9 -0
- data/.cody/project.rb +4 -0
- data/.gitignore +16 -11
- data/.gitmodules +9 -0
- data/.rspec +1 -1
- data/CHANGELOG.md +34 -0
- data/Gemfile +3 -1
- data/Gemfile.lock +112 -0
- data/Guardfile +19 -0
- data/LICENSE.txt +18 -17
- data/README.md +55 -21
- data/Rakefile +10 -2
- data/exe/pipe +14 -0
- data/exe/pipedream +14 -0
- data/lib/pipedream/autoloader.rb +21 -0
- data/lib/pipedream/aws_services/helpers.rb +71 -0
- data/lib/pipedream/aws_services.rb +20 -0
- data/lib/pipedream/build.rb +13 -0
- data/lib/pipedream/cli.rb +60 -0
- data/lib/pipedream/command.rb +82 -0
- data/lib/pipedream/completer/script.rb +6 -0
- data/lib/pipedream/completer/script.sh +10 -0
- data/lib/pipedream/completer.rb +159 -0
- data/lib/pipedream/core.rb +63 -0
- data/lib/pipedream/create.rb +12 -0
- data/lib/pipedream/delete.rb +27 -0
- data/lib/pipedream/deploy.rb +40 -0
- data/lib/pipedream/dsl/pipeline/approve.rb +34 -0
- data/lib/pipedream/dsl/pipeline/codebuild.rb +57 -0
- data/lib/pipedream/dsl/pipeline/github.rb +42 -0
- data/lib/pipedream/dsl/pipeline.rb +37 -0
- data/lib/pipedream/dsl/role.rb +50 -0
- data/lib/pipedream/dsl/schedule.rb +30 -0
- data/lib/pipedream/dsl/sns.rb +15 -0
- data/lib/pipedream/dsl/ssm.rb +22 -0
- data/lib/pipedream/dsl/webhook.rb +27 -0
- data/lib/pipedream/evaluate.rb +47 -0
- data/lib/pipedream/help/completion.md +22 -0
- data/lib/pipedream/help/completion_script.md +3 -0
- data/lib/pipedream/help/deploy.md +54 -0
- data/lib/pipedream/help/start.md +20 -0
- data/lib/pipedream/help.rb +9 -0
- data/lib/pipedream/init.rb +68 -0
- data/lib/pipedream/pipeline/s3_bucket.rb +88 -0
- data/lib/pipedream/pipeline.rb +61 -0
- data/lib/pipedream/role.rb +181 -0
- data/lib/pipedream/schedule.rb +99 -0
- data/lib/pipedream/sequence.rb +66 -0
- data/lib/pipedream/setting.rb +82 -0
- data/lib/pipedream/sns.rb +43 -0
- data/lib/pipedream/stack.rb +95 -0
- data/lib/pipedream/start.rb +84 -0
- data/lib/pipedream/update.rb +12 -0
- data/lib/pipedream/version.rb +1 -1
- data/lib/pipedream/webhook.rb +60 -0
- data/lib/pipedream.rb +18 -1
- data/lib/template/.pipedream/pipeline.rb.tt +40 -0
- data/lib/template/.pipedream/schedule.rb +3 -0
- data/lib/template/.pipedream/settings.yml +9 -0
- data/lib/template/.pipedream/sns.rb +14 -0
- data/pipedream.gemspec +25 -14
- data/vendor/aws_data/CHANGELOG.md +7 -0
- data/vendor/aws_data/Gemfile +4 -0
- data/vendor/aws_data/Gemfile.lock +48 -0
- data/vendor/aws_data/LICENSE.txt +21 -0
- data/vendor/aws_data/README.md +42 -0
- data/vendor/aws_data/Rakefile +6 -0
- data/vendor/aws_data/aws_data.gemspec +30 -0
- data/{bin → vendor/aws_data/bin}/console +1 -1
- data/{bin → vendor/aws_data/bin}/setup +0 -0
- data/vendor/aws_data/lib/aws_data/version.rb +3 -0
- data/vendor/aws_data/lib/aws_data.rb +91 -0
- data/vendor/aws_data/spec/aws_data_spec.rb +5 -0
- data/vendor/aws_data/spec/spec_helper.rb +14 -0
- data/vendor/cfn-status/Gemfile +4 -0
- data/vendor/cfn-status/Gemfile.lock +49 -0
- data/vendor/cfn-status/LICENSE.txt +21 -0
- data/vendor/cfn-status/README.md +56 -0
- data/vendor/cfn-status/Rakefile +6 -0
- data/vendor/cfn-status/bin/console +14 -0
- data/vendor/cfn-status/bin/setup +8 -0
- data/vendor/cfn-status/cfn-status.gemspec +30 -0
- data/vendor/cfn-status/lib/cfn/aws_service.rb +56 -0
- data/vendor/cfn-status/lib/cfn/status/version.rb +5 -0
- data/vendor/cfn-status/lib/cfn/status.rb +220 -0
- data/vendor/cfn-status/spec/cfn/status_spec.rb +81 -0
- data/vendor/cfn-status/spec/fixtures/cfn/stack-events-complete.json +1080 -0
- data/vendor/cfn-status/spec/fixtures/cfn/stack-events-in-progress.json +1080 -0
- data/vendor/cfn-status/spec/fixtures/cfn/stack-events-update-rollback-complete.json +1086 -0
- data/vendor/cfn-status/spec/spec_helper.rb +14 -0
- data/vendor/cfn_camelizer/CHANGELOG.md +10 -0
- data/vendor/cfn_camelizer/Gemfile +4 -0
- data/vendor/cfn_camelizer/LICENSE.txt +21 -0
- data/vendor/cfn_camelizer/README.md +40 -0
- data/vendor/cfn_camelizer/Rakefile +6 -0
- data/vendor/cfn_camelizer/bin/console +14 -0
- data/vendor/cfn_camelizer/bin/setup +8 -0
- data/vendor/cfn_camelizer/cfn_camelizer.gemspec +32 -0
- data/vendor/cfn_camelizer/lib/camelizer.yml +27 -0
- data/vendor/cfn_camelizer/lib/cfn_camelizer/version.rb +3 -0
- data/vendor/cfn_camelizer/lib/cfn_camelizer.rb +92 -0
- data/vendor/cfn_camelizer/spec/cfn_camelizer_spec.rb +79 -0
- data/vendor/cfn_camelizer/spec/spec_helper.rb +14 -0
- metadata +295 -23
- data/.travis.yml +0 -7

data/lib/pipedream/dsl/webhook.rb
@@ -0,0 +1,27 @@
+module Pipedream::Dsl
+  module Webhook
+    include Ssm
+
+    # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codepipeline-webhook.html
+    PROPERTIES = %w[
+      authentication
+      authentication_configuration
+      filters
+      name
+      register_with_third_party
+      target_action
+      target_pipeline
+      target_pipeline_version
+    ]
+    PROPERTIES.each do |prop|
+      define_method(prop) do |v|
+        @properties[prop.to_sym] = v
+      end
+    end
+
+    def secret_token(v)
+      @secret_token = v
+    end
+    alias_method :github_token, :secret_token
+  end
+end
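
The `define_method` loop above turns each CloudFormation webhook property into a DSL setter, while `secret_token`/`github_token` capture the GitHub secret separately. A hedged sketch of what a project's `.pipedream/webhook.rb` could look like when evaluated against this DSL — the values, the action name, and the underscored filter keys are illustrative assumptions, not taken from this package:

```ruby
# .pipedream/webhook.rb -- illustrative sketch, not shipped with this gem
authentication "GITHUB_HMAC"
filters [{ json_path: "$.ref", match_equals: "refs/heads/{Branch}" }] # assumed underscored keys, camelized later
target_action "Source"                                                # hypothetical action name
register_with_third_party true
github_token "MY_GITHUB_OAUTH_TOKEN"                                  # alias for secret_token
```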

data/lib/pipedream/evaluate.rb
@@ -0,0 +1,47 @@
+module Pipedream
+  module Evaluate
+    def evaluate(path)
+      source_code = IO.read(path)
+      begin
+        instance_eval(source_code, path)
+      rescue Exception => e
+        if e.class == SystemExit # allow exit to happen normally
+          raise
+        else
+          task_definition_error(e)
+          puts "\nFull error:"
+          raise
+        end
+      end
+    end
+
+    private
+    # Prints out a user friendly task_definition error message
+    def task_definition_error(e)
+      error_info = e.backtrace.first
+      path, line_no, _ = error_info.split(':')
+      line_no = line_no.to_i
+      puts "Error evaluating #{path}:".color(:red)
+      puts e.message
+      puts "Here's the line in #{path} with the error:\n\n"
+
+      contents = IO.read(path)
+      content_lines = contents.split("\n")
+      context = 5 # lines of context
+      top, bottom = [line_no-context-1, 0].max, line_no+context-1
+      spacing = content_lines.size.to_s.size
+      content_lines[top..bottom].each_with_index do |line_content, index|
+        line_number = top+index+1
+        if line_number == line_no
+          printf("%#{spacing}d %s\n".color(:red), line_number, line_content)
+        else
+          printf("%#{spacing}d %s\n", line_number, line_content)
+        end
+      end
+    end
+
+    def lookup_codepipeline_file(name)
+      [".pipedream", @options[:type], name].compact.join("/")
+    end
+  end
+end
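
Passing the file path as the second argument to `instance_eval` is what lets `task_definition_error` point back at the DSL file: Ruby attributes the backtrace entries to that path and line number. A minimal standalone sketch of the technique, independent of the gem (the file contents and variable names are made up):

```ruby
require "tempfile"

# Write a tiny "DSL" file whose second line raises.
dsl = Tempfile.new(["pipeline", ".rb"])
dsl.write("name = 'demo'\nraise 'boom'\n")
dsl.close

begin
  # Passing the path as the filename argument makes backtraces reference it.
  Object.new.instance_eval(IO.read(dsl.path), dsl.path)
rescue => e
  path, line_no, = e.backtrace.first.split(":")
  puts "error in #{path} at line #{line_no}"  # => points at the temp file, line 2
end
```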

data/lib/pipedream/help/completion.md
@@ -0,0 +1,22 @@
+Example:
+
+    pipe completion
+
+Prints words for TAB auto-completion.
+
+Examples:
+
+    pipe completion
+    pipe completion hello
+    pipe completion hello name
+
+To enable TAB auto-completion, add the following to your profile:
+
+    eval $(pipe completion_script)
+
+Auto-completion example usage:
+
+    pipe [TAB]
+    pipe hello [TAB]
+    pipe hello name [TAB]
+    pipe hello name --[TAB]

data/lib/pipedream/help/deploy.md
@@ -0,0 +1,54 @@
+Examples:
+
+    pipe deploy
+    pipe deploy demo             # explicitly specify pipeline name
+    pipe deploy demo -b mybranch # specify git branch
+
+The pipeline is generated from the DSL and created with CloudFormation. The files that the DSL evaluates are in the `.pipedream` folder:
+
+    .pipedream/pipeline.rb
+    .pipedream/role.rb
+    .pipedream/schedule.rb
+    .pipedream/webhook.rb
+
+To create the CodePipeline pipeline, you run:
+
+    pipe deploy
+
+You'll see output that looks something like this:
+
+    $ pipe deploy
+    Generated CloudFormation template at /tmp/codepipeline.yml
+    Deploying stack demo-pipe with CodePipeline project demo
+    Creating stack demo-pipe. Check CloudFormation console for status.
+    Stack name demo-pipe status CREATE_IN_PROGRESS
+    Here's the CloudFormation url to check for more details https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks
+    Waiting for stack to complete
+    04:14:03AM CREATE_IN_PROGRESS AWS::CloudFormation::Stack demo-pipe User Initiated
+    04:14:06AM CREATE_IN_PROGRESS AWS::IAM::Role IamRole
+    04:14:07AM CREATE_IN_PROGRESS AWS::IAM::Role IamRole Resource creation Initiated
+    04:14:25AM CREATE_COMPLETE AWS::IAM::Role IamRole
+    04:14:28AM CREATE_IN_PROGRESS AWS::CodePipeline::Pipeline Pipeline
+    04:14:29AM CREATE_IN_PROGRESS AWS::CodePipeline::Pipeline Pipeline Resource creation Initiated
+    04:14:29AM CREATE_COMPLETE AWS::CodePipeline::Pipeline Pipeline
+    04:14:31AM CREATE_IN_PROGRESS AWS::CodePipeline::Webhook Webhook
+    04:14:33AM CREATE_IN_PROGRESS AWS::CodePipeline::Webhook Webhook Resource creation Initiated
+    04:14:33AM CREATE_COMPLETE AWS::CodePipeline::Webhook Webhook
+    04:14:35AM CREATE_COMPLETE AWS::CloudFormation::Stack demo-pipe
+    Stack success status: CREATE_COMPLETE
+    Time took for stack deployment: 35s.
+    $
+
+## Explicit Pipeline Name
+
+By default, the pipeline name is inferred from the name of the parent folder you are in. You can explicitly specify the pipeline name as the first CLI argument:
+
+    pipe deploy my-pipeline
+
+## Specify Git Branch
+
+It is useful to build pipelines from different source git branches. You can pass a `--branch` option to the `pipe deploy` command. The CLI `--branch` option always takes the highest precedence. Example:
+
+    pipe deploy my-pipeline --branch my-branch
+
+Note: When you specify a branch, pipedream first updates the pipeline before starting the pipeline execution. This is done because CodePipeline does not natively support specifying the branch at execution time. It is discussed more here: [Using Different Branches]({% link _docs/examples/different-branches.md %}).

data/lib/pipedream/help/start.md
@@ -0,0 +1,20 @@
+You can start a pipeline with the `pipe start` command. Here's an example:
+
+    $ pipe start
+    Pipeline started: demo
+    Please check the CodePipeline console for the status.
+    CodePipeline Console: https://us-west-2.console.aws.amazon.com/codesuite/codepipeline/pipelines/demo/view
+    Pipeline cli: aws codepipeline get-pipeline-execution --pipeline-execution-id 02579d64-9271-4edc-aa45-bc9629d732bb --pipeline-name demo
+    $
+
+## Specifying Code Branch
+
+If you would like to start a build using a specific code branch, you can use the `--branch` or `-b` option. Example:
+
+    pipe start -b feature-branch
+
+## AWS CLI Equivalent
+
+The `pipe start` command is a simple wrapper around the AWS API via the Ruby SDK. You can also start pipelines with the `aws codepipeline` CLI. Here's the equivalent CLI command:
+
+    aws codepipeline start-pipeline-execution --name demo

data/lib/pipedream/init.rb
@@ -0,0 +1,68 @@
+module Pipedream
+  class Init < Sequence
+    # Ugly, but this is how I can get the options to match with this Thor::Group
+    def self.cli_options
+      [
+        [:name, desc: "CodePipeline project name."],
+        [:mode, desc: "Modes: light or full", default: "light"],
+        [:force, type: :boolean, desc: "Bypass the 'are you sure?' overwrite prompt for existing files."],
+        [:template, desc: "Custom template to use."],
+        [:template_mode, desc: "Template mode: replace or additive."],
+      ]
+    end
+    cli_options.each { |o| class_option(*o) }
+
+    def setup_template_repo
+      return unless @options[:template]&.include?('/')
+
+      sync_template_repo
+    end
+
+    def set_source_path
+      return unless @options[:template]
+
+      custom_template = "#{ENV['HOME']}/.pipedream/templates/#{full_repo_name}"
+
+      if @options[:template_mode] == "replace" # replace the template entirely
+        override_source_paths(custom_template)
+      else # additive: modify on top of default template
+        default_template = File.expand_path("../../template", __FILE__)
+        override_source_paths([custom_template, default_template])
+      end
+    end
+
+    def copy_project
+      puts "Initialize codepipeline project in .pipedream"
+
+      excludes = %w[.git]
+      if @options[:mode] == "light"
+        excludes += %w[
+          settings.yml
+          sns.rb
+        ]
+      end
+      pattern = Regexp.new(excludes.join('|'))
+
+      if @options[:template]
+        directory ".", ".pipedream", exclude_pattern: pattern
+      else
+        directory ".", exclude_pattern: pattern
+      end
+    end
+
+    private
+    def project_name
+      inferred_name = File.basename(Dir.pwd).gsub('_','-').gsub(/[^0-9a-zA-Z,-]/, '')
+      @options[:name] || inferred_name
+    end
+
+    def project_github_repo
+      default = "user/repo"
+      return default unless File.exist?(".git/config") && git_installed?
+
+      url = `git config --get remote.origin.url`.strip
+      repo = Dsl::Pipeline::Github.extract_repo_source(url)
+      repo == '' ? default : repo
+    end
+  end
+end
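
A small detail in `copy_project`: the excludes are joined into a single alternation regexp, so any template path containing one of those substrings is skipped when the Thor `directory` action copies the template. A quick standalone illustration (the paths are hypothetical):

```ruby
excludes = %w[.git settings.yml sns.rb]    # light-mode excludes from copy_project
pattern  = Regexp.new(excludes.join('|'))  # => /.git|settings.yml|sns.rb/

pattern.match?("sns.rb")       # => true  (skipped in light mode)
pattern.match?("pipeline.rb")  # => false (copied into .pipedream)
```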

data/lib/pipedream/pipeline/s3_bucket.rb
@@ -0,0 +1,88 @@
+require "aws-sdk-s3"
+
+class Pipedream::Pipeline
+  class S3Bucket
+    extend Memoist
+    include Pipedream::AwsServices
+
+    class << self
+      extend Memoist
+      def name
+        new.name
+      end
+      memoize :name
+    end
+
+    def name
+      ensure_exists(bucket_name)
+      bucket_name
+    end
+    memoize :name
+
+    def bucket_name
+      "codepipeline-#{aws.region}-#{aws.account}"
+    end
+
+    def ensure_exists(name)
+      return if exists?(name) || ENV['TEST']
+      s3.create_bucket(bucket: name)
+      policy = {
+        "Version": "2012-10-17",
+        "Id": "SSEAndSSLPolicy",
+        "Statement": [
+          {
+            "Sid": "DenyUnEncryptedObjectUploads",
+            "Effect": "Deny",
+            "Principal": "*",
+            "Action": "s3:PutObject",
+            "Resource": "arn:aws:s3:::#{name}/*",
+            "Condition": {
+              "StringNotEquals": {
+                "s3:x-amz-server-side-encryption": "aws:kms"
+              }
+            }
+          },
+          {
+            "Sid": "DenyInsecureConnections",
+            "Effect": "Deny",
+            "Principal": "*",
+            "Action": "s3:*",
+            "Resource": "arn:aws:s3:::#{name}/*",
+            "Condition": {
+              "Bool": {
+                "aws:SecureTransport": "false"
+              }
+            }
+          }
+        ]
+      }
+      s3.put_bucket_policy(
+        bucket: name,
+        policy: JSON.dump(policy),
+      )
+    rescue Aws::S3::Errors::BucketAlreadyExists => e
+      puts "ERROR #{e.class}: #{e.message}".color(:red)
+      puts "Bucket name: #{name}"
+      exit 1
+    end
+
+    def exists?(name)
+      begin
+        s3.head_bucket(bucket: name)
+        true
+      rescue Aws::S3::Errors::BucketAlreadyOwnedByYou, Aws::S3::Errors::Http301Error
+        # These exceptions indicate bucket already exists
+        # Aws::S3::Errors::Http301Error could be inaccurate but compromising for simplicity
+        true
+      rescue
+        false
+      end
+    end
+
+    private
+    def aws
+      AwsData.new
+    end
+    memoize :aws
+  end
+end
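
`bucket_name` is derived purely from the current AWS region and account (looked up through the vendored `aws_data` gem), so every pipeline in a given account and region shares one artifact bucket, created lazily by `ensure_exists`. A tiny illustration of the naming convention with made-up values:

```ruby
# Region and account id are made up for illustration.
region  = "us-west-2"
account = "111111111111"
"codepipeline-#{region}-#{account}"  # => "codepipeline-us-west-2-111111111111"
```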

data/lib/pipedream/pipeline.rb
@@ -0,0 +1,61 @@
+module Pipedream
+  class Pipeline
+    extend Memoist
+    include Dsl::Pipeline
+    include Evaluate
+
+    def initialize(options={})
+      @options = options
+      @pipeline_path = options[:pipeline_path] || get_pipeline_path
+      @properties = default_properties # defaults make pipeline.rb simpler
+      @stages = []
+    end
+
+    def run
+      evaluate(@pipeline_path)
+      @properties[:stages] ||= @stages
+      set_source_branch!
+
+      resource = {
+        pipeline: {
+          type: "AWS::CodePipeline::Pipeline",
+          properties: @properties
+        }
+      }
+      CfnCamelizer.transform(resource)
+    end
+
+    def default_properties
+      {
+        name: @options[:full_pipeline_name],
+        role_arn: { "Fn::GetAtt": "IamRole.Arn" },
+        artifact_store: {
+          type: "S3",
+          location: s3_bucket, # auto creates s3 bucket
+        }
+      }
+    end
+
+    # cli branch option always takes highest precedence
+    def set_source_branch!
+      return unless @options[:branch]
+
+      source_stage = @properties[:stages].first
+      action = source_stage[:actions].first
+      action[:configuration][:branch] = @options[:branch]
+    end
+
+    def exist?
+      File.exist?(@pipeline_path)
+    end
+
+    def s3_bucket
+      S3Bucket.name
+    end
+
+    private
+    def get_pipeline_path
+      lookup_codepipeline_file "pipeline.rb"
+    end
+  end
+end
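
`set_source_branch!` assumes the first stage is the source stage and simply overwrites the `branch` key in its first action's configuration. A standalone sketch of that mutation, using a made-up stages structure rather than anything generated by the gem:

```ruby
# Hypothetical stages hash shaped roughly like the DSL would produce.
properties = {
  stages: [
    { name: "Source", actions: [{ configuration: { repo: "user/repo", branch: "master" } }] },
    { name: "Build",  actions: [{ configuration: { project_name: "demo" } }] },
  ]
}

branch = "feature-branch"  # stands in for @options[:branch]
source_stage = properties[:stages].first
action = source_stage[:actions].first
action[:configuration][:branch] = branch

properties[:stages].first[:actions].first[:configuration][:branch]  # => "feature-branch"
```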

data/lib/pipedream/role.rb
@@ -0,0 +1,181 @@
+require "yaml"
+
+module Pipedream
+  class Role
+    include Pipedream::Dsl::Role
+    include Evaluate
+
+    def initialize(options={})
+      @options = options
+      @role_path = options[:role_path] || get_role_path
+      @properties = default_properties
+    end
+
+    def run
+      evaluate(@role_path) if File.exist?(@role_path)
+      @properties[:policies] = [{
+        policy_name: "CodePipelineAccess",
+        policy_document: {
+          version: "2012-10-17",
+          statement: derived_iam_statements
+        }
+      }]
+
+      @properties[:managed_policy_arns] = @managed_policy_arns if @managed_policy_arns && !@managed_policy_arns.empty?
+
+      resource = {
+        logical_id => {
+          type: "AWS::IAM::Role",
+          properties: @properties
+        }
+      }
+      CfnCamelizer.transform(resource)
+    end
+
+    def logical_id
+      "IamRole"
+    end
+
+    private
+    def get_role_path
+      lookup_codepipeline_file("role.rb")
+    end
+
+    def default_properties
+      {
+        assume_role_policy_document: {
+          statement: [{
+            action: ["sts:AssumeRole"],
+            effect: "Allow",
+            principal: {
+              service: principal_services
+            }
+          }],
+          version: "2012-10-17"
+        },
+        path: "/"
+      }
+    end
+
+    def principal_services
+      ["codepipeline.amazonaws.com"]
+    end
+
+    def derived_iam_statements
+      @iam_statements || default_iam_statements
+    end
+
+    def default_iam_statements
+      # Based on the one created by CodePipeline Console
+      [{
+        "action"=>["iam:PassRole"],
+        "resource"=>"*",
+        "effect"=>"Allow",
+        "condition"=>
+          {"string_equals_if_exists"=>
+            {"iam:passed_to_service"=>
+              ["cloudformation.amazonaws.com",
+               "elasticbeanstalk.amazonaws.com",
+               "ec2.amazonaws.com",
+               "ecs-tasks.amazonaws.com"]
+            }
+          }
+      },{
+        "action"=>
+          ["codecommit:CancelUploadArchive",
+           "codecommit:GetBranch",
+           "codecommit:GetCommit",
+           "codecommit:GetUploadArchiveStatus",
+           "codecommit:UploadArchive"],
+        "resource"=>"*",
+        "effect"=>"Allow"
+      },{
+        "action"=>
+          ["codedeploy:CreateDeployment",
+           "codedeploy:GetApplication",
+           "codedeploy:GetApplicationRevision",
+           "codedeploy:GetDeployment",
+           "codedeploy:GetDeploymentConfig",
+           "codedeploy:RegisterApplicationRevision"],
+        "resource"=>"*",
+        "effect"=>"Allow"
+      },{
+        "action"=>
+          ["elasticbeanstalk:*",
+           "ec2:*",
+           "elasticloadbalancing:*",
+           "autoscaling:*",
+           "cloudwatch:*",
+           "s3:*",
+           "sns:*",
+           "cloudformation:*",
+           "rds:*",
+           "sqs:*",
+           "ecs:*"],
+        "resource"=>"*",
+        "effect"=>"Allow"
+      },{
+        "action"=>["lambda:InvokeFunction", "lambda:ListFunctions"],
+        "resource"=>"*",
+        "effect"=>"Allow"
+      },{
+        "action"=>
+          ["opsworks:CreateDeployment",
+           "opsworks:DescribeApps",
+           "opsworks:DescribeCommands",
+           "opsworks:DescribeDeployments",
+           "opsworks:DescribeInstances",
+           "opsworks:DescribeStacks",
+           "opsworks:UpdateApp",
+           "opsworks:UpdateStack"],
+        "resource"=>"*",
+        "effect"=>"Allow"
+      },{
+        "action"=>
+          ["cloudformation:CreateStack",
+           "cloudformation:DeleteStack",
+           "cloudformation:DescribeStacks",
+           "cloudformation:UpdateStack",
+           "cloudformation:CreateChangeSet",
+           "cloudformation:DeleteChangeSet",
+           "cloudformation:DescribeChangeSet",
+           "cloudformation:ExecuteChangeSet",
+           "cloudformation:SetStackPolicy",
+           "cloudformation:ValidateTemplate"],
+        "resource"=>"*",
+        "effect"=>"Allow"
+      },{
+        "action"=>["codebuild:BatchGetBuilds", "codebuild:StartBuild"],
+        "resource"=>"*",
+        "effect"=>"Allow"
+      },{
+        "action"=>
+          ["devicefarm:ListProjects",
+           "devicefarm:ListDevicePools",
+           "devicefarm:GetRun",
+           "devicefarm:GetUpload",
+           "devicefarm:CreateUpload",
+           "devicefarm:ScheduleRun"],
+        "resource"=>"*",
+        "effect"=>"Allow",
+      },{
+        "action"=>
+          ["servicecatalog:ListProvisioningArtifacts",
+           "servicecatalog:CreateProvisioningArtifact",
+           "servicecatalog:DescribeProvisioningArtifact",
+           "servicecatalog:DeleteProvisioningArtifact",
+           "servicecatalog:UpdateProduct"],
+        "resource"=>"*",
+        "effect"=>"Allow",
+      },{
+        "action"=>["cloudformation:ValidateTemplate"],
+        "resource"=>"*",
+        "effect"=>"Allow",
+      },{
+        "action"=>["ecr:DescribeImages"],
+        "resource"=>"*",
+        "effect"=>"Allow",
+      }]
+    end
+  end
+end

data/lib/pipedream/schedule.rb
@@ -0,0 +1,99 @@
+module Pipedream
+  class Schedule
+    include Pipedream::Dsl::Schedule
+    include Evaluate
+
+    def initialize(options={})
+      @options = options
+      @schedule_path = options[:schedule_path] || get_schedule_path
+      @properties = default_properties
+    end
+
+    def run
+      return unless File.exist?(@schedule_path)
+
+      old_properties = @properties.clone
+      evaluate(@schedule_path)
+
+      @properties[:schedule_expression] = @schedule_expression if @schedule_expression
+      set_rule_event! if @rule_event_props
+      return if old_properties == @properties # empty schedule.rb file
+
+      resource = {
+        events_rule: {
+          type: "AWS::Events::Rule",
+          properties: @properties
+        },
+        events_rule_role: events_rule_role,
+      }
+      CfnCamelizer.transform(resource)
+    end
+
+    def set_rule_event!
+      props = @rule_event_props
+      if props.key?(:detail)
+        description = props.key?(:description) ? props.delete(:description) : rule_description
+        rule_props = { event_pattern: props, description: description }
+      else # if props.key?(:event_pattern)
+        props[:description] ||= rule_description
+        rule_props = props
+      end
+
+      @properties.merge!(rule_props)
+    end
+
+    def default_properties
+      description = "CodePipeline #{@options[:full_pipeline_name]}"
+      name = description.gsub(" ", "-").downcase
+      {
+        description: description,
+        # event_pattern: ,
+        name: name,
+        # schedule_expression: ,
+        state: "ENABLED",
+        targets: [{
+          arn: "arn:aws:codepipeline:#{aws.region}:#{aws.account}:#{@options[:full_pipeline_name]}",
+          role_arn: { "Fn::GetAtt": "EventsRuleRole.Arn" }, # required for specific CodePipeline target.
+          id: "CodePipelineTarget",
+        }]
+      }
+    end
+
+    private
+    def get_schedule_path
+      lookup_codepipeline_file("schedule.rb")
+    end
+
+    def events_rule_role
+      {
+        type: "AWS::IAM::Role",
+        properties: {
+          assume_role_policy_document: {
+            statement: [{
+              action: [ "sts:AssumeRole" ],
+              effect: "Allow",
+              principal: { service: [ "events.amazonaws.com" ] }
+            }],
+            version: "2012-10-17"
+          },
+          path: "/",
+          policies: [{
+            policy_name: "CodePipelineAccess",
+            policy_document: {
+              version: "2012-10-17",
+              statement: [{
+                action: "codepipeline:StartPipelineExecution",
+                effect: "Allow",
+                resource: "arn:aws:codepipeline:#{aws.region}:#{aws.account}:#{@options[:full_pipeline_name]}"
+              }]
+            }
+          }]
+        }
+      }
+    end
+
+    def aws
+      @aws ||= AwsData.new
+    end
+  end
+end
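
`set_rule_event!` accepts the rule event in two shapes: a hash with a bare `:detail` pattern, which it wraps under `event_pattern` and gives a description, or a full properties hash that is merged through with only a default description filled in. A standalone sketch of the first branch, with made-up values standing in for `@rule_event_props` and the default description:

```ruby
rule_description = "CodePipeline demo"     # stands in for the gem's default description
props = { detail: { state: ["FAILED"] } }  # hypothetical @rule_event_props

description = props.key?(:description) ? props.delete(:description) : rule_description
rule_props  = { event_pattern: props, description: description }
# => { event_pattern: { detail: { state: ["FAILED"] } }, description: "CodePipeline demo" }
```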