pipely 0.8.3 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/pipely/build.rb +2 -16
- data/lib/pipely/build/daily_scheduler.rb +1 -1
- data/lib/pipely/build/definition.rb +30 -2
- data/lib/pipely/build/environment_config.rb +24 -1
- data/lib/pipely/build/s3_path_builder.rb +65 -33
- data/lib/pipely/deploy/bootstrap.rb +17 -14
- data/lib/pipely/deploy/bootstrap_context.rb +87 -10
- data/lib/pipely/deploy/bootstrap_registry.rb +45 -0
- data/lib/pipely/deploy/client.rb +33 -18
- data/lib/pipely/deploy/json_definition.rb +51 -0
- data/lib/pipely/pipeline_date_time/pipeline_date.rb +62 -0
- data/lib/pipely/pipeline_date_time/pipeline_date_pattern.rb +42 -0
- data/lib/pipely/pipeline_date_time/pipeline_date_range_base.rb +44 -0
- data/lib/pipely/pipeline_date_time/pipeline_day_range.rb +14 -0
- data/lib/pipely/pipeline_date_time/pipeline_month_range.rb +26 -0
- data/lib/pipely/pipeline_date_time/pipeline_year_range.rb +25 -0
- data/lib/pipely/tasks/definition.rb +7 -0
- data/lib/pipely/tasks/deploy.rb +7 -0
- data/lib/pipely/tasks/upload_pipeline_as_gem.rb +19 -9
- data/lib/pipely/version.rb +1 -1
- data/spec/fixtures/bootstrap_contexts/green.rb +9 -0
- data/spec/fixtures/bootstrap_contexts/simple.rb +9 -0
- data/spec/fixtures/templates/bootstrap.sh.erb +4 -0
- data/spec/lib/pipely/build/environment_config_spec.rb +58 -0
- data/spec/lib/pipely/build/s3_path_builder_spec.rb +34 -2
- data/spec/lib/pipely/build/template_spec.rb +10 -10
- data/spec/lib/pipely/build_spec.rb +29 -0
- data/spec/lib/pipely/deploy/bootstrap_context_spec.rb +102 -14
- data/spec/lib/pipely/deploy/bootstrap_registry_spec.rb +32 -0
- data/spec/lib/pipely/deploy/bootstrap_spec.rb +41 -24
- data/spec/lib/pipely/pipeline_date_time/pipeline_date_pattern_spec.rb +181 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_date_range_base_spec.rb +39 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_date_spec.rb +110 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_day_range_spec.rb +23 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_month_range_spec.rb +93 -0
- data/spec/lib/pipely/pipeline_date_time/pipeline_year_range_spec.rb +93 -0
- data/spec/lib/pipely/tasks/upload_pipeline_as_gem_spec.rb +59 -0
- metadata +49 -3
data/lib/pipely/version.rb
CHANGED
data/spec/lib/pipely/build/environment_config_spec.rb
CHANGED
@@ -0,0 +1,58 @@
+require 'pipely/build/environment_config'
+
+describe Pipely::Build::EnvironmentConfig do
+
+  describe '.load(filename, environment)' do
+    let(:filename) { 'path/to/config/yaml.yml' }
+
+    let(:config) do
+      YAML.load(<<-EOS)
+        my_env:
+          key: 'my_val'
+        production:
+          key: 'prod_val'
+        staging:
+          key: 'staging_val'
+      EOS
+    end
+
+    before do
+      allow(YAML).to receive(:load_file).with(filename) { config }
+    end
+
+    context 'given a custom environment' do
+      subject { described_class.load(filename, 'my_env') }
+
+      it 'loads config from a YAML file' do
+        expect(subject[:key]).to eq('my_val')
+      end
+    end
+
+    context 'given the "production" environment' do
+      subject { described_class.load(filename, 'production') }
+
+      it 'loads config from a YAML file' do
+        expect(subject[:key]).to eq('prod_val')
+      end
+
+      it 'supports legacy defaults' do
+        expect(subject[:s3_prefix]).to eq('production/:namespace')
+        expect(subject[:scheduler]).to eq('daily')
+      end
+    end
+
+    context 'given the "staging" environment' do
+      subject { described_class.load(filename, 'staging') }
+
+      it 'loads config from a YAML file' do
+        expect(subject[:key]).to eq('staging_val')
+      end
+
+      it 'supports legacy defaults' do
+        expect(subject[:s3_prefix]).to eq('staging/:whoami/:namespace')
+        expect(subject[:scheduler]).to eq('now')
+      end
+    end
+  end
+
+end
data/spec/lib/pipely/build/s3_path_builder_spec.rb
CHANGED
@@ -35,9 +35,13 @@ describe Pipely::Build::S3PathBuilder do
     should eq("s3://asset-bucket/run-prefix/shared/\#{format(@scheduledStartTime,'YYYY-MM-dd')}")
   }
 
+  its(:bucket_relative_s3_asset_prefix) {
+    should eq("run-prefix/\#{format(@scheduledStartTime,'YYYY-MM-dd_HHmmss')}")
+  }
+
   describe "#to_hash" do
     it 'includes the necessary keys for supplying config to a Template' do
-      expect(subject.to_hash.keys).to
+      expect(subject.to_hash.keys).to include(
        :s3_log_prefix,
        :s3_step_prefix,
        :s3n_step_prefix,
@@ -45,7 +49,35 @@ describe Pipely::Build::S3PathBuilder do
        :s3n_asset_prefix,
        :s3_shared_asset_prefix,
        :bucket_relative_s3_asset_prefix,
-
+      )
+    end
+  end
+
+  context "when a custom template is specified via config" do
+    subject {
+      described_class.new(
+        foo: 'my-value',
+        templates: {
+          bar: ':protocol://my-bucket/:foo/okay'
+        }
+      )
+    }
+
+    its(:s3_bar_prefix) {
+      should eq('s3://my-bucket/my-value/okay')
+    }
+
+    its(:s3n_bar_prefix) {
+      should eq('s3n://my-bucket/my-value/okay')
+    }
+
+    describe "#to_hash" do
+      it 'includes the keys for the custom template' do
+        expect(subject.to_hash.keys).to include(
+          :s3_bar_prefix,
+          :s3n_bar_prefix,
+        )
+      end
     end
   end
 
data/spec/lib/pipely/build/template_spec.rb
CHANGED
@@ -17,17 +17,17 @@ describe Pipely::Build::Template do
   end
 
   describe "#streaming_hadoop_step(options)" do
-    let(:s3_path_builder) {
-      Pipely::Build::S3PathBuilder.new(
-        logs: 'log-bucket',
-        steps: 'step-bucket',
-        assets: 'asset-bucket',
-        prefix: 'run-prefix'
-      )
-    }
-
     before do
-
+      # emulate applying config from S3PathBuilder, as done in Definition#to_json
+      subject.apply_config({
+        s3_log_prefix: "s3://log-bucket/run-prefix/\#{format(@scheduledStartTime,'YYYY-MM-dd_HHmmss')}",
+        s3_step_prefix: "s3://step-bucket/run-prefix",
+        s3n_step_prefix: "s3n://step-bucket/run-prefix",
+        s3_asset_prefix: "s3://asset-bucket/run-prefix/\#{format(@scheduledStartTime,'YYYY-MM-dd_HHmmss')}",
+        s3n_asset_prefix: "s3n://asset-bucket/run-prefix/\#{format(@scheduledStartTime,'YYYY-MM-dd_HHmmss')}",
+        s3_shared_asset_prefix: "s3://asset-bucket/run-prefix/shared/\#{format(@scheduledStartTime,'YYYY-MM-dd')}",
+        bucket_relative_s3_asset_prefix: "run-prefix/\#{format(@scheduledStartTime,'YYYY-MM-dd_HHmmss')}",
+      })
     end
 
     it "builds a streaming hadoop step" do
data/spec/lib/pipely/build_spec.rb
CHANGED
@@ -1,3 +1,32 @@
+require 'pipely/build'
+
 describe Pipely::Build do
 
+  describe '.build_definition(template, environment, config_path)' do
+
+    let(:template) { double }
+    let(:environment) { 'production' }
+    let(:config_path) { 'path/to/config' }
+
+    let(:config) { double }
+
+    before do
+      allow(Pipely::Build::EnvironmentConfig).to receive(:load).
+        with(config_path, environment.to_sym).
+        and_return(config)
+    end
+
+    it 'builds a Definition' do
+      expect(
+        described_class.build_definition(template, environment, config_path)
+      ).to eq(
+        Pipely::Build::Definition.new(
+          template,
+          environment.to_sym,
+          config
+        )
+      )
+    end
+  end
+
 end
data/spec/lib/pipely/deploy/bootstrap_context_spec.rb
CHANGED
@@ -2,7 +2,6 @@
 
 require 'spec_helper'
 require 'pipely/deploy/bootstrap_context'
-require 'fileutils'
 
 describe Pipely::Deploy::BootstrapContext do
   subject do
@@ -11,9 +10,20 @@ describe Pipely::Deploy::BootstrapContext do
     end
   end
 
-
-
-
+  let(:aws_install_gems_script) do
+    "
+# one.gem
+aws s3 cp one.gem one.gem
+gem install --force --local one.gem --no-ri --no-rdoc
+
+# two.gem
+aws s3 cp two.gem two.gem
+gem install --force --local two.gem --no-ri --no-rdoc
+"
+  end
+
+  let(:hadoop_install_gems_script) do
+    "
 # one.gem
 hadoop fs -copyToLocal one.gem one.gem
 gem install --force --local one.gem --no-ri --no-rdoc
@@ -22,35 +32,113 @@ gem install --force --local one.gem --no-ri --no-rdoc
 hadoop fs -copyToLocal two.gem two.gem
 gem install --force --local two.gem --no-ri --no-rdoc
 "
+  end
+
+  describe "#install_gems_script" do
+    it "with hadoop fs" do
+      expect(subject.install_gems_script(:hadoop_fs)).to eql(
+        hadoop_install_gems_script)
     end
 
    context "with aws cli" do
      it "should build script for aws cli" do
-        expect(subject.install_gems_script(:awscli) ).to eql
+        expect(subject.install_gems_script(:awscli) ).to eql(
+          aws_install_gems_script)
+      end
+    end
+
+    context "with yield" do
+      it "should build script for aws cli" do
+        expect(subject.install_gems_script(:awscli) do |command,file,filename|
+          "custom command - #{file} #{filename} #{command}"
+        end).to eql "
 # one.gem
-
+custom command - one.gem one.gem aws s3 cp
 gem install --force --local one.gem --no-ri --no-rdoc
 
 # two.gem
-
+custom command - two.gem two.gem aws s3 cp
 gem install --force --local two.gem --no-ri --no-rdoc
 "
      end
    end
 
-    context "
-
-
-
-      end).to eql "
+  context "using the emr context" do
+    describe "#install_gems_script" do
+      it "build script using hadoop fs" do
+        expect(subject.install_gems_script(:hadoop_fs)).to eql "
 # one.gem
-
+hadoop fs -copyToLocal one.gem one.gem
 gem install --force --local one.gem --no-ri --no-rdoc
 
 # two.gem
-
+hadoop fs -copyToLocal two.gem two.gem
 gem install --force --local two.gem --no-ri --no-rdoc
 "
+      end
+    end
+  end
+
+  context "using the emr context" do
+    let(:emr) { subject.emr }
+
+    describe '#install_gems_script' do
+      it 'should be same as parent hadoop install script' do
+        expect(emr.install_gems_script).to eq(hadoop_install_gems_script)
+      end
+    end
+  end
+
+  context "using the ec2 context" do
+    let(:ec2) { subject.ec2 }
+
+    describe '#install_gems_script' do
+      it 'should be same as parent aws install script' do
+        expect(ec2.install_gems_script).to eq(aws_install_gems_script)
+      end
+    end
+
+    describe "#as_root" do
+
+      context "on first run" do
+        it "should build script with ssh init" do
+          expect(ec2.as_root { "Custom Script here" }).to eql "
+# Set up ssh access
+if [ ! -f ~/.ssh/id_rsa ]; then
+mkdir -p ~/.ssh
+ssh-keygen -P '' -f ~/.ssh/id_rsa
+cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
+chmod 600 ~/.ssh/authorized_keys
+fi
+
+# Use ssh to bypass the sudo \"require tty\" setting
+ssh -o \"StrictHostKeyChecking no\" -t -t ec2-user@localhost <<- EOF
+sudo su -;
+Custom Script here
+# exit twice, once for su and once for ssh
+exit;
+exit;
+EOF
+"
+        end
+      end
+
+      context "on consective runs" do
+        it "should build script" do
+          ec2.as_root { "First run" }
+
+          expect(ec2.as_root { "Second run" }).to eql "
+# Use ssh to bypass the sudo \"require tty\" setting
+ssh -o \"StrictHostKeyChecking no\" -t -t ec2-user@localhost <<- EOF
+sudo su -;
+Second run
+# exit twice, once for su and once for ssh
+exit;
+exit;
+EOF
+"
+        end
+      end
     end
   end
 end
data/spec/lib/pipely/deploy/bootstrap_registry_spec.rb
CHANGED
@@ -0,0 +1,32 @@
+# Copyright Swipely, Inc. All rights reserved.
+
+require 'spec_helper'
+require 'pipely/deploy/bootstrap_registry'
+
+describe Pipely::Deploy::BootstrapRegistry do
+
+  subject { described_class }
+
+  describe "#mixins" do
+    it "should default to empty" do
+      expect(subject.mixins).to be_empty
+    end
+  end
+
+  describe "#register_mixins" do
+    context "with a mixin" do
+      let(:mixin) { "Fixtures::BootstrapContexts::Green" }
+      let(:result) { [mixin] }
+      it "should registry mixin" do
+        expect(subject.register_mixins(mixin)).to eql(result)
+        expect(subject.mixins).to eql(result)
+      end
+    end
+
+    context "when a mixin cannot be required" do
+      it "should raise" do
+        expect { subject.register_mixins('bad::mixin') }.to raise_error
+      end
+    end
+  end
+end
data/spec/lib/pipely/deploy/bootstrap_spec.rb
CHANGED
@@ -2,14 +2,15 @@
 
 require 'spec_helper'
 require 'pipely/deploy/bootstrap'
+require 'pipely/deploy/bootstrap_registry'
 require 'fileutils'
+require 'fixtures/bootstrap_contexts/simple'
+require 'fixtures/bootstrap_contexts/green'
 
 describe Pipely::Deploy::Bootstrap do
 
-  subject { described_class.new(
-
-  let(:s3_uploader) { double }
-
+  subject { described_class.new(gem_files, s3_steps_path) }
+  let(:s3_steps_path) { 'a/test/path' }
   let(:gem_files) do
     {
       'packaged-gem1' => '/path/to/cache/packaged-gem1.gem',
@@ -17,37 +18,53 @@ describe Pipely::Deploy::Bootstrap do
     }
   end
 
-  describe "#
-
-
+  describe "#context" do
+    context "without any mixins" do
+      let(:context) { subject.context }
+
+      it "should have s3 steps path" do
+        expect(context.s3_steps_path).to eq(s3_steps_path)
+      end
+
+      it "builds S3 urls to the uploaded gem files" do
+        expect(context.gem_files).to eq(gem_files)
+      end
     end
 
-
-
+    context "with one mixin" do
+      let(:context) { subject.context( mixin.name ) }
+      let(:mixin) { Fixtures::BootstrapContexts::Simple }
 
-
+      it "should have Simple mixin method" do
+        expect(context.simple).to eq("simple")
+      end
     end
-    end
 
-
-
-
-
+    context "with multiple mixins" do
+      let(:context) { subject.context( mixins.map(&:name) ) }
+      let(:mixins) do
+        [Fixtures::BootstrapContexts::Simple,Fixtures::BootstrapContexts::Green]
+      end
 
-
-
+      it "should have simple mixin method" do
+        expect(context.simple).to eq("simple")
+      end
 
-
-
+      it "should have green mixin method" do
+        expect(context.green).to eq("green")
      end
    end
 
-
-
-
+    context "with mixin from BootstrapRegistry" do
+      let(:context) { subject.context }
+      before do
+        Pipely::Deploy::BootstrapRegistry.instance.register_mixins(
+          "Fixtures::BootstrapContexts::Simple")
+      end
 
-
-
+      it "should have green mixin method" do
+        expect(context.green).to eq("green")
+      end
    end
  end
 end