pipely 0.8.3 → 0.10.0
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
- checksums.yaml +4 -4
- data/lib/pipely/build.rb +2 -16
- data/lib/pipely/build/daily_scheduler.rb +1 -1
- data/lib/pipely/build/definition.rb +30 -2
- data/lib/pipely/build/environment_config.rb +24 -1
- data/lib/pipely/build/s3_path_builder.rb +65 -33
- data/lib/pipely/deploy/bootstrap.rb +17 -14
- data/lib/pipely/deploy/bootstrap_context.rb +87 -10
- data/lib/pipely/deploy/bootstrap_registry.rb +45 -0
- data/lib/pipely/deploy/client.rb +33 -18
- data/lib/pipely/deploy/json_definition.rb +51 -0
- data/lib/pipely/pipeline_date_time/pipeline_date.rb +62 -0
- data/lib/pipely/pipeline_date_time/pipeline_date_pattern.rb +42 -0
- data/lib/pipely/pipeline_date_time/pipeline_date_range_base.rb +44 -0
- data/lib/pipely/pipeline_date_time/pipeline_day_range.rb +14 -0
- data/lib/pipely/pipeline_date_time/pipeline_month_range.rb +26 -0
- data/lib/pipely/pipeline_date_time/pipeline_year_range.rb +25 -0
- data/lib/pipely/tasks/definition.rb +7 -0
- data/lib/pipely/tasks/deploy.rb +7 -0
- data/lib/pipely/tasks/upload_pipeline_as_gem.rb +19 -9
- data/lib/pipely/version.rb +1 -1
- data/spec/fixtures/bootstrap_contexts/green.rb +9 -0
- data/spec/fixtures/bootstrap_contexts/simple.rb +9 -0
- data/spec/fixtures/templates/bootstrap.sh.erb +4 -0
- data/spec/lib/pipely/build/environment_config_spec.rb +58 -0
- data/spec/lib/pipely/build/s3_path_builder_spec.rb +34 -2
- data/spec/lib/pipely/build/template_spec.rb +10 -10
- data/spec/lib/pipely/build_spec.rb +29 -0
- data/spec/lib/pipely/deploy/bootstrap_context_spec.rb +102 -14
- data/spec/lib/pipely/deploy/bootstrap_registry_spec.rb +32 -0
- data/spec/lib/pipely/deploy/bootstrap_spec.rb +41 -24
- data/spec/lib/pipely/pipeline_date_time/pipeline_date_pattern_spec.rb +181 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_date_range_base_spec.rb +39 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_date_spec.rb +110 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_day_range_spec.rb +23 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_month_range_spec.rb +93 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_year_range_spec.rb +93 -0
- data/spec/lib/pipely/tasks/upload_pipeline_as_gem_spec.rb +59 -0
- metadata +49 -3
data/lib/pipely/deploy/client.rb
CHANGED
@@ -15,6 +15,7 @@ require 'aws-sdk'
 require 'logger'
 require 'tempfile'
 require 'securerandom'
+require 'pipely/deploy/json_definition'

 module Pipely
   module Deploy
@@ -49,16 +50,18 @@ module Pipely
         created_pipeline_id = create_pipeline(pipeline_name,
                                               definition,
                                               tags)
-
-
-
-
-
-
-
-
-
-
+        if created_pipeline_id
+          @log.info("Created pipeline id '#{created_pipeline_id}'")
+
+          # Delete old pipelines
+          pipeline_ids.each do |pipeline_id|
+            begin
+              delete_pipeline(pipeline_id)
+              @log.info("Deleted pipeline '#{pipeline_id}'")
+
+            rescue PipelineDeployerError => error
+              @log.warn(error)
+            end
           end
         end

@@ -83,20 +86,32 @@ module Pipely
       end

       def create_pipeline(pipeline_name, definition, tags={})
-
-
-
-
+        # Use Fog gem, instead of aws-sdk gem, to create pipeline with tags.
+        #
+        # TODO: Consolidate on aws-sdk when tagging support is added.
+        #
         created_pipeline = @data_pipelines.pipelines.create(
-          unique_id:
+          unique_id: SecureRandom.uuid,
           name: pipeline_name,
           tags: default_tags.merge(tags)
         )

-
-
+        # Use aws-sdk gem, instead of Fog, to put definition and activate
+        # pipeline, for improved reporting of validation errors.
+        #
+        response = @aws.put_pipeline_definition(
+          pipeline_id: created_pipeline.id,
+          pipeline_objects: JSONDefinition.parse(definition)
+        )

-
+        if response[:errored]
+          @log.error("Failed to put pipeline definition.")
+          @log.error(response[:validation_errors].inspect)
+          false
+        else
+          @aws.activate_pipeline(pipeline_id: created_pipeline.id)
+          created_pipeline.id
+        end
       end

       def delete_pipeline(pipeline_id)
data/lib/pipely/deploy/json_definition.rb
ADDED
@@ -0,0 +1,51 @@
+require 'json'
+
+module Pipely
+  module Deploy
+
+    # The JSON definition format expected by the CLI differs from the structure
+    # expected by the API. This class transforms a CLI-ready definition into
+    # the pipeline object hashes expected by the API.
+    #
+    class JSONDefinition
+      def self.parse(definition)
+        definition_objects = JSON.parse(definition)['objects']
+        definition_objects.map { |object| new(object).to_api }
+      end
+
+      def initialize(object)
+        @json_fields = object.clone
+        @id = @json_fields.delete('id')
+        @name = @json_fields.delete('name') || @id
+      end
+
+      def to_api
+        {
+          'id' => @id,
+          'name' => @name,
+          'fields' => fields
+        }
+      end
+
+      private
+
+      def fields
+        @json_fields.map{|k,v| field_for_kv(k,v)}.flatten
+      end
+
+      def field_for_kv(key, value)
+        if value.is_a?(Hash)
+          { 'key' => key, 'refValue' => value['ref'] }
+
+        elsif value.is_a?(Array)
+          value.map { |subvalue| field_for_kv(key, subvalue) }
+
+        else
+          { 'key' => key, 'stringValue' => value }
+
+        end
+      end
+    end
+
+  end
+end
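To make the transformation concrete, here is a usage sketch (it assumes pipely >= 0.10.0 is installed; the sample JSON is illustrative). The resulting array is what the reworked client passes as pipeline_objects: to put_pipeline_definition.

# Usage sketch: CLI-style definition in, API-ready pipeline objects out.
require 'pp'
require 'pipely/deploy/json_definition'

definition = <<-JSON
{
  "objects": [
    {
      "id": "Default",
      "scheduleType": "cron",
      "schedule": { "ref": "Schedule" },
      "dependsOn": ["ExtractStep", "TransformStep"]
    }
  ]
}
JSON

pp Pipely::Deploy::JSONDefinition.parse(definition)
# => [{"id"=>"Default",
#      "name"=>"Default",
#      "fields"=>
#       [{"key"=>"scheduleType", "stringValue"=>"cron"},
#        {"key"=>"schedule", "refValue"=>"Schedule"},
#        {"key"=>"dependsOn", "stringValue"=>"ExtractStep"},
#        {"key"=>"dependsOn", "stringValue"=>"TransformStep"}]}]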
data/lib/pipely/pipeline_date_time/pipeline_date.rb
ADDED
@@ -0,0 +1,62 @@
+# encoding: utf-8
+module Pipely
+  module PipelineDateTime
+    # Encapsulates AWS pipeline date
+    #
+    class PipelineDate
+      DEFAULT_DAY_FORMAT = 'YYYY/MM/dd'
+      DEFAULT_MONTH_FORMAT = 'YYYY/MM'
+      DEFAULT_YEAR_FORMAT = 'YYYY'
+
+      class << self
+        def day_format=(day_format)
+          @day_format = day_format
+        end
+
+        def day_format
+          @day_format || DEFAULT_DAY_FORMAT
+        end
+
+        def month_format=(month_format)
+          @month_format = month_format
+        end
+
+        def month_format
+          @month_format || DEFAULT_MONTH_FORMAT
+        end
+
+        def year_format=(year_format)
+          @year_format = year_format
+        end
+
+        def year_format
+          @year_format || DEFAULT_YEAR_FORMAT
+        end
+      end
+
+      def initialize(target_date, days_back)
+        days_back = days_back.to_i
+        @date_expression = case
+                           when days_back > 0
+                             "minusDays(#{target_date}, #{days_back})"
+                           when days_back == 0
+                             target_date
+                           else
+                             "plusDays(#{target_date}, #{-days_back})"
+                           end
+      end
+
+      def day
+        "\#{format(#{@date_expression}, \"#{PipelineDate.day_format}\")}"
+      end
+
+      def month
+        "\#{format(#{@date_expression}, \"#{PipelineDate.month_format}\")}"
+      end
+
+      def year
+        "\#{format(#{@date_expression}, \"#{PipelineDate.year_format}\")}"
+      end
+    end
+  end
+end
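A sketch of the Data Pipeline expressions this class emits, assuming pipely >= 0.10.0 is installed; '@scheduledStartTime' is just an illustrative target-date expression:

require 'pipely/pipeline_date_time/pipeline_date'

date = Pipely::PipelineDateTime::PipelineDate.new('@scheduledStartTime', 3)
puts date.day    # => #{format(minusDays(@scheduledStartTime, 3), "YYYY/MM/dd")}
puts date.month  # => #{format(minusDays(@scheduledStartTime, 3), "YYYY/MM")}

today = Pipely::PipelineDateTime::PipelineDate.new('@scheduledStartTime', 0)
puts today.year  # => #{format(@scheduledStartTime, "YYYY")}

# The class-level formats can be overridden, e.g. for dash-separated prefixes:
Pipely::PipelineDateTime::PipelineDate.day_format = 'YYYY-MM-dd'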
data/lib/pipely/pipeline_date_time/pipeline_date_pattern.rb
ADDED
@@ -0,0 +1,42 @@
+# encoding: utf-8
+require 'pipely/pipeline_date_time/pipeline_day_range'
+require 'pipely/pipeline_date_time/pipeline_month_range'
+require 'pipely/pipeline_date_time/pipeline_year_range'
+
+module Pipely
+  module PipelineDateTime
+    # Mixin for constructing compact date pattern selections
+    #
+    module PipelineDatePattern
+      def date_pattern
+        selection.target_all_time ? '.*' : any_string(date_pattern_parts)
+      end
+
+      private
+
+      def date_pattern_parts
+        day_range.exclude(month_range.start, month_range.end)
+        month_range.exclude(year_range.start, year_range.end)
+        day_range.days + month_range.months + year_range.years
+      end
+
+      def day_range
+        @day_range ||= PipelineDayRange.new(selection.target_date, num_days, 0)
+      end
+
+      def month_range
+        @month_range ||= PipelineMonthRange.new(selection.target_date, num_days,
+                                                0)
+      end
+
+      def year_range
+        @year_range ||= PipelineYearRange.new(selection.target_date, num_days,
+                                              0)
+      end
+
+      def num_days
+        selection.num_days_back.to_i
+      end
+    end
+  end
+end
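The mixin expects its host to provide selection (responding to target_date, num_days_back and target_all_time) and an any_string helper that joins the generated expressions. The stand-in host below is purely illustrative and not part of pipely:

require 'set'
require 'pipely/pipeline_date_time/pipeline_date_pattern'

# Hypothetical selection object and host class, for illustration only.
Selection = Struct.new(:target_date, :num_days_back, :target_all_time)

class ExamplePatternHost
  include Pipely::PipelineDateTime::PipelineDatePattern

  attr_reader :selection

  def initialize(selection)
    @selection = selection
  end

  private

  # Joins the generated date expressions into a single regex alternation.
  def any_string(strings)
    "(#{strings.join('|')})"
  end
end

host = ExamplePatternHost.new(Selection.new('@scheduledStartTime', 2, false))
puts host.date_pattern
# e.g. (#{format(@scheduledStartTime, "YYYY/MM/dd")}|...) covering 0..2 days back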
data/lib/pipely/pipeline_date_time/pipeline_date_range_base.rb
ADDED
@@ -0,0 +1,44 @@
+# encoding: utf-8
+require 'pipely/pipeline_date_time/pipeline_date'
+
+module Pipely
+  module PipelineDateTime
+    # Base class for pipeline date ranges
+    #
+    class PipelineDateRangeBase
+      attr_reader :days_back
+
+      def initialize(target_date, days_back_start, days_back_end)
+        @target_date = target_date
+        @days_back_start = days_back_start
+        @days_back_end = days_back_end
+        @days_back = (days_back_end..days_back_start).to_set
+      end
+
+      def start
+        @days_back_start
+      end
+
+      def end
+        @days_back_end
+      end
+
+      def exclude(days_back_start, days_back_end)
+        return if days_back_start < 0
+        return if days_back_end < 0
+        return if days_back_start < days_back_end # Back smaller for earlier
+        (days_back_end..days_back_start).each do |days_back|
+          @days_back.delete days_back
+        end
+      end
+
+      private
+
+      def pipeline_dates
+        @pipeline_dates ||= @days_back.map do |days_back|
+          PipelineDate.new(@target_date, days_back)
+        end
+      end
+    end
+  end
+end
data/lib/pipely/pipeline_date_time/pipeline_day_range.rb
ADDED
@@ -0,0 +1,14 @@
+# encoding: utf-8
+require 'pipely/pipeline_date_time/pipeline_date_range_base'
+
+module Pipely
+  module PipelineDateTime
+    # Class that represents a range of individual pipeline days
+    #
+    class PipelineDayRange < PipelineDateRangeBase
+      def days
+        @days ||= pipeline_dates.map { |pd| pd.day }
+      end
+    end
+  end
+end
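For example, a 45-day window of day expressions with the stretch that month expressions will cover carved out via the inherited #exclude (values illustrative, assuming pipely >= 0.10.0):

require 'set'
require 'pipely/pipeline_date_time/pipeline_day_range'

days = Pipely::PipelineDateTime::PipelineDayRange.new('@scheduledStartTime', 45, 0)
days.exclude(45, 30)     # drop 30..45 days back; month expressions cover them
puts days.days.length    # => 30 (0 through 29 days back remain)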
data/lib/pipely/pipeline_date_time/pipeline_month_range.rb
ADDED
@@ -0,0 +1,26 @@
+# encoding: utf-8
+require 'pipely/pipeline_date_time/pipeline_date_range_base'
+
+module Pipely
+  module PipelineDateTime
+    # Class that represents a range of individual pipeline months
+    #
+    class PipelineMonthRange < PipelineDateRangeBase
+      MINIMUM_MONTH_OFFSET = 30 # The month of x+/-30 will never add extra days
+      MONTH_INTERVAL = 28 # We never miss a month by taking every 28 days
+
+      attr_reader :start, :end
+
+      def initialize(target_date, days_back_start, days_back_end)
+        @target_date = target_date
+        @start = days_back_start - MINIMUM_MONTH_OFFSET
+        @end = days_back_end + MINIMUM_MONTH_OFFSET
+        @days_back = (@end..@start).step(MONTH_INTERVAL).to_set
+      end
+
+      def months
+        @months ||= pipeline_dates.map { |pd| pd.month }
+      end
+    end
+  end
+end
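Per the constants above, the range stays 30 days inside each edge of the window and samples every 28 days, so at least one sample date falls in every month it is meant to cover. A quick sketch with illustrative values:

require 'set'
require 'pipely/pipeline_date_time/pipeline_month_range'

months = Pipely::PipelineDateTime::PipelineMonthRange.new('@scheduledStartTime', 90, 0)
puts months.start          # => 60 (90 - MINIMUM_MONTH_OFFSET)
puts months.end            # => 30 (0 + MINIMUM_MONTH_OFFSET)
puts months.months.length  # => 2 (sample dates at 30 and 58 days back)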
data/lib/pipely/pipeline_date_time/pipeline_year_range.rb
ADDED
@@ -0,0 +1,25 @@
+# encoding: utf-8
+require 'pipely/pipeline_date_time/pipeline_date_range_base'
+
+module Pipely
+  module PipelineDateTime
+    # Class that represents a range of individual pipeline years
+    #
+    class PipelineYearRange < PipelineDateRangeBase
+      DAYS_IN_YEAR = 365
+
+      attr_reader :start, :end
+
+      def initialize(target_date, days_back_start, days_back_end)
+        @target_date = target_date
+        @start = days_back_start - DAYS_IN_YEAR
+        @end = days_back_end + DAYS_IN_YEAR
+        @days_back = (@end..@start).step(DAYS_IN_YEAR).to_set
+      end
+
+      def years
+        @years ||= pipeline_dates.map { |pd| pd.year }
+      end
+    end
+  end
+end
data/lib/pipely/tasks/definition.rb
CHANGED
@@ -34,6 +34,9 @@ module Pipely
       def initialize(*args, &task_block)
         setup_ivars(args)

+        # First non-name parameter allows overriding the configured scheduler.
+        args.unshift(:scheduler)
+
         directory path

         desc "Generates the pipeline definition file"
@@ -43,6 +46,10 @@ module Pipely
           task_block.call(*[self, task_args].slice(0, task_block.arity))
         end

+        if scheduler_override = task_args[:scheduler]
+          definition.config[:scheduler] = scheduler_override
+        end
+
         run_task verbose
       end
     end
data/lib/pipely/tasks/deploy.rb
CHANGED
@@ -26,6 +26,9 @@ module Pipely
       def initialize(*args, &task_block)
         setup_ivars(args)

+        # First non-name parameter allows overriding the configured scheduler.
+        args.unshift(:scheduler)
+
         desc "Deploy pipeline" unless ::Rake.application.last_comment

         task name, *args do |_, task_args|
@@ -34,6 +37,10 @@ module Pipely
           task_block.call(*[self, task_args].slice(0, task_block.arity))
         end

+        if scheduler_override = task_args[:scheduler]
+          definition.config[:scheduler] = scheduler_override
+        end
+
         run_task verbose
       end
     end
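Both task classes now splice a :scheduler argument in front of the task's own arguments, so an invocation such as rake deploy[once] can override the scheduler configured in the build definition (the actual task names depend on the Rakefile). A self-contained sketch of that Rake argument plumbing, outside of pipely:

require 'rake'
include Rake::DSL

args = []                  # stands in for the task's own argument list
args.unshift(:scheduler)   # first non-name parameter becomes :scheduler

task(:deploy, *args) do |_, task_args|
  if (scheduler_override = task_args[:scheduler])
    puts "Overriding scheduler: #{scheduler_override}"
  end
end

Rake::Task[:deploy].invoke('once')   # what `rake deploy[once]` would pass in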
data/lib/pipely/tasks/upload_pipeline_as_gem.rb
CHANGED
@@ -1,4 +1,5 @@
 require 'rake'
+require 'rake/tasklib'
 require 'aws'
 require 'erubis'
 require 'pipely/deploy/bootstrap'
@@ -15,6 +16,7 @@ module Pipely
       attr_accessor :s3_steps_path
       attr_accessor :s3_gems_path
       attr_accessor :config
+      attr_accessor :templates

       def initialize(*args, &task_block)
         setup_ivars(args)
@@ -35,12 +37,14 @@ module Pipely
       def setup_ivars(args)
         @name = args.shift || 'deploy:upload_pipeline_as_gem'
         @verbose = true
+        @templates = Dir.glob("templates/*.erb")
       end

       def run_task(verbose)
-
+        s3_gem_paths = upload_gems
+        context = build_bootstrap_context(s3_gem_paths)

-
+        templates.each do |erb_file|
           upload_filename = File.basename(erb_file).sub( /\.erb$/, '' )

           # Exclude the pipeline.json
@@ -55,18 +59,24 @@

       private
       def s3_bucket
-
-        s3.buckets[@bucket_name]
+        @s3_bucket ||= AWS::S3.new.buckets[@bucket_name]
       end

-      def
-
-
-
+      def upload_gems
+        pipeline_gems = Pipely::Bundler.gem_files
+        s3_uploader = Pipely::Deploy::S3Uploader.new(s3_bucket, s3_gems_path)
+        s3_uploader.upload(pipeline_gems.values)
+        s3_uploader.s3_urls(pipeline_gems.values)
+      end
+
+      def build_bootstrap_context(s3_gems)
+        bootstrap_helper = Pipely::Deploy::Bootstrap.new(s3_gems, s3_steps_path)
+
+        context = bootstrap_helper.context(config['bootstrap_mixins'])

         # erb context
         {
-          bootstrap:
+          bootstrap: context,
           config: config
         }
       end
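The new templates accessor defaults to Dir.glob("templates/*.erb") and can be pointed elsewhere. A Rakefile sketch using only the accessors visible in this diff; the class name is assumed from the file name, the values are illustrative, and any other required settings are omitted:

require 'pipely/tasks/upload_pipeline_as_gem'

Pipely::Tasks::UploadPipelineAsGem.new do |t|
  t.s3_steps_path = 's3://my-bucket/pipeline/steps'     # illustrative paths
  t.s3_gems_path  = 's3://my-bucket/pipeline/gems'
  t.templates     = Dir.glob('deploy/templates/*.erb')  # overrides the default glob
end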