jenkins_pipeline_builder 0.10.11 → 0.10.12

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
@@ -0,0 +1,128 @@
+module JenkinsPipelineBuilder
+  class Job
+    attr_accessor :job
+
+    def initialize(job)
+      @job = job
+    end
+
+    def name
+      job[:name]
+    end
+
+    def logger
+      JenkinsPipelineBuilder.logger
+    end
+
+    def create_or_update
+      success, payload = to_xml
+      return success, payload unless success
+      xml = payload
+      if JenkinsPipelineBuilder.debug || JenkinsPipelineBuilder.file_mode
+        logger.info "Will create job #{job}"
+        logger.info "#{xml}" if @debug
+        FileUtils.mkdir_p(out_dir) unless File.exist?(out_dir)
+        File.open("#{out_dir}/#{name}.xml", 'w') { |f| f.write xml }
+        return [true, nil]
+      end
+
+      if JenkinsPipelineBuilder.client.job.exists?(name)
+        JenkinsPipelineBuilder.client.job.update(name, xml)
+      else
+        JenkinsPipelineBuilder.client.job.create(name, xml)
+      end
+      [true, nil]
+    end
+
+    def to_xml
+      fail 'Job name is not specified' unless name
+
+      logger.info "Creating Yaml Job #{job}"
+      job[:job_type] = 'free_style' unless job[:job_type]
+      case job[:job_type]
+      when 'job_dsl'
+        @xml = setup_freestyle_base(job)
+        payload = update_job_dsl
+      when 'multi_project'
+        @xml = setup_freestyle_base(job)
+        payload = adjust_multi_project
+      when 'build_flow'
+        @xml = setup_freestyle_base(job)
+        payload = add_job_dsl
+      when 'free_style', 'pull_request_generator'
+        payload = setup_freestyle_base job
+      else
+        return false, "Job type: #{job[:job_type]} is not one of job_dsl, multi_project, build_flow or free_style"
+      end
+
+      [true, payload]
+    end
+
+    private
+
+    def out_dir
+      'out/xml'
+    end
+
+    def update_job_dsl
+      n_xml = Nokogiri::XML(@xml)
+      n_builders = n_xml.xpath('//builders').first
+      Nokogiri::XML::Builder.with(n_builders) do |b_xml|
+        build_job_dsl(job, b_xml)
+      end
+      n_xml.to_xml
+    end
+
+    def build_job_dsl(job, xml)
+      xml.send('javaposse.jobdsl.plugin.ExecuteDslScripts') do
+        if job.key?(:job_dsl)
+          xml.scriptText job[:job_dsl]
+          xml.usingScriptText true
+        else
+          xml.targets job[:job_dsl_targets]
+          xml.usingScriptText false
+        end
+        xml.ignoreExisting false
+        xml.removedJobAction 'IGNORE'
+      end
+    end
+
+    def adjust_multi_project
+      n_xml = Nokogiri::XML(@xml)
+      root = n_xml.root
+      root.name = 'com.tikal.jenkins.plugins.multijob.MultiJobProject'
+      n_xml.to_xml
+    end
+
+    def add_job_dsl
+      n_xml = Nokogiri::XML(@xml)
+      n_xml.root.name = 'com.cloudbees.plugins.flow.BuildFlow'
+      Nokogiri::XML::Builder.with(n_xml.root) do |b_xml|
+        b_xml.dsl job[:build_flow]
+      end
+      n_xml.to_xml
+    end
+
+    def setup_freestyle_base(params)
+      # I'm pretty unclear what these templates are...
+      if params.key?(:template)
+        template_name = params[:template]
+        fail "Job template '#{template_name}' can't be resolved." unless @job_templates.key?(template_name)
+        params.delete(:template)
+        template = @job_templates[template_name]
+        puts "Template found: #{template}"
+        params = template.deep_merge(params)
+        puts "Template merged: #{template}"
+      end
+
+      xml = JenkinsPipelineBuilder.client.job.build_freestyle_config(params)
+      n_xml = Nokogiri::XML(xml, &:noblanks)
+
+      logger.debug 'Loading the required modules'
+      JenkinsPipelineBuilder.registry.traverse_registry_path('job', params, n_xml)
+      logger.debug 'Module loading complete'
+
+      n_xml.to_xml
+    end
+  end
+end
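The new Job class (added above) wraps a single job-definition hash and handles XML generation plus create-or-update against the Jenkins server. A minimal usage sketch, assuming JenkinsPipelineBuilder.client and the module registry are already configured; the job name and branch below are placeholder values:

    job = JenkinsPipelineBuilder::Job.new(
      name: 'example-job',         # placeholder name
      job_type: 'free_style',      # the default when no type is given
      scm_branch: 'origin/master'  # placeholder branch
    )
    # Renders the XML, then either writes it to out/xml (debug/file mode) or pushes it to Jenkins.
    success, payload = job.create_or_update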
@@ -0,0 +1,112 @@
+module JenkinsPipelineBuilder
+  class JobCollection
+    attr_accessor :collection, :remote_dependencies
+
+    def initialize
+      @collection = {}
+      @remote_dependencies = RemoteDependencies.new self
+    end
+
+    def clear_remote_dependencies
+      @remote_dependencies = RemoteDependencies.new self
+    end
+
+    def logger
+      JenkinsPipelineBuilder.logger
+    end
+
+    def projects
+      result = []
+      collection.values.each do |item|
+        result << item if item[:type] == :project
+      end
+      result
+    end
+
+    def jobs
+      result = []
+      collection.values.each do |item|
+        result << item if item[:type] == :job
+      end
+      result
+    end
+
+    def defaults
+      collection.each_value do |item|
+        return item if item[:type] == 'defaults' || item[:type] == :defaults
+      end
+      # This is here for historical purposes
+      get_item('global')
+    end
+
+    def get_item(name)
+      collection[name.to_s]
+    end
+
+    def load_from_path(path, remote = false)
+      load_extensions(path)
+      path = File.expand_path(path, Dir.getwd)
+      if File.directory?(path)
+        logger.info "Generating from folder #{path}"
+        Dir[File.join(path, '/*.{yaml,yml}')].each do |file|
+          logger.info "Loading file #{file}"
+          yaml = YAML.load_file(file)
+          load_file(yaml, remote)
+        end
+        Dir[File.join(path, '/*.json')].each do |file|
+          logger.info "Loading file #{file}"
+          json = JSON.parse(IO.read(file))
+          load_file(json, remote)
+        end
+      else
+        logger.info "Loading file #{path}"
+        if path.end_with? 'json'
+          hash = JSON.parse(IO.read(path))
+        else # elsif path.end_with?("yml") || path.end_with?("yaml")
+          hash = YAML.load_file(path)
+        end
+        load_file(hash, remote)
+      end
+      remote_dependencies.cleanup if remote
+    end
+
+    def load_file(yaml, remote = false)
+      yaml.each do |section|
+        Utils.symbolize_keys_deep!(section)
+        key = section.keys.first
+        value = section[key]
+        if key == :dependencies
+          logger.info 'Resolving Dependencies for remote project'
+          remote_dependencies.load value
+          next
+        end
+        name = value[:name]
+        if collection.key?(name)
+          existing_remote = collection[name.to_s][:remote]
+          # skip if the existing item is local and the new item is remote
+          if remote && !existing_remote
+            next
+          # override if the existing item is remote and the new is local
+          elsif existing_remote && !remote
+            logger.info "Duplicate item with name '#{name}' was detected from the remote folder."
+          else
+            fail "Duplicate item with name '#{name}' was detected."
+          end
+        end
+        collection[name.to_s] = { name: name.to_s, type: key, value: value, remote: remote }
+      end
+    end
+
+    def load_extensions(path)
+      path = "#{path}/extensions"
+      path = File.expand_path(path, Dir.getwd)
+      return unless File.directory?(path)
+      logger.info "Loading extensions from folder #{path}"
+      logger.info Dir.glob("#{path}/*.rb").inspect
+      Dir.glob("#{path}/*.rb").each do |file|
+        logger.info "Loaded #{file}"
+        require file
+      end
+    end
+  end
+end
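JobCollection (added above) now owns the project/job definitions and their remote dependencies that were previously tracked directly on the Generator. A rough sketch of driving it on its own; the path and item name are placeholders:

    collection = JenkinsPipelineBuilder::JobCollection.new
    collection.load_from_path('pipeline')   # loads *.yaml, *.yml and *.json files from a folder (or a single file)
    collection.projects                     # entries whose type is :project
    collection.jobs                         # entries whose type is :job
    collection.get_item('example-project')  # look up a single entry by name (placeholder name)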
@@ -29,7 +29,7 @@ module JenkinsPipelineBuilder
 
     def versions
       # Return a hash with a default of 1000 so that we'll get the newest in debug
-      return Hash.new { |_| '1000.0' } if JenkinsPipelineBuilder.generator.debug
+      return Hash.new { |_| '1000.0' } if JenkinsPipelineBuilder.debug
       @versions ||= JenkinsPipelineBuilder.client.plugin.list_installed
     end
 
@@ -56,7 +56,7 @@ module JenkinsPipelineBuilder
 
     def override_git_2_params(job)
       job[:value][:scm_params] ||= {}
-      job[:value][:scm_params][:changelog_to_branch] = { remote: 'origin', branch: 'pr-{{pull_request_number}}' }
+      job[:value][:scm_params][:changelog_to_branch] = { remote: 'origin', branch: 'pr/{{pull_request_number}}/head' }
     end
 
     # Change the git branch for each job
@@ -0,0 +1,112 @@
+module JenkinsPipelineBuilder
+  class RemoteDependencies
+    attr_reader :job_collection, :entries
+
+    def initialize(job_collection)
+      @entries = {}
+      @job_collection = job_collection
+    end
+
+    def logger
+      JenkinsPipelineBuilder.logger
+    end
+
+    def cleanup
+      entries.each_value do |file|
+        FileUtils.rm_r file
+        FileUtils.rm_r "#{file}.tar"
+      end
+    end
+
+    # TODO: Look into remote jobs not working according to sinan
+
+    def load(dependencies)
+      ### Load remote YAML
+      # Download Tar.gz
+      dependencies.each do |source|
+        source = source[:source]
+        url = source[:url]
+
+        file = "remote-#{entries.length}"
+        if entries[url]
+          file = entries[url]
+        else
+          opts = {}
+          opts = { ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE } if source[:verify_ssl] == false
+          download_yaml(url, file, opts)
+        end
+
+        path = File.expand_path(file, Dir.getwd)
+        # Load templates recursively
+        unless source[:templates]
+          logger.info 'No specific template specified'
+          # Try to load the folder or the pipeline folder
+          path = File.join(path, 'pipeline') if Dir.entries(path).include? 'pipeline'
+          return job_collection.load_from_path(path, true)
+        end
+
+        load_templates(path, source[:templates])
+      end
+    end
+
+    private
+
+    def load_template(path, template)
+      # If we specify what folder the yaml is in, load that
+      if template[:folder]
+        path = File.join(path, template[:folder])
+      else
+        path = File.join(path, template[:name]) unless template[:name] == 'default'
+        # If we are looking for the newest version or no version was set
+        if (template[:version].nil? || template[:version] == 'newest') && File.directory?(path)
+          folders = Dir.entries(path)
+          highest = folders.max
+          template[:version] = highest unless highest == 0
+        end
+        path = File.join(path, template[:version]) unless template[:version].nil?
+        path = File.join(path, 'pipeline')
+      end
+
+      if File.directory?(path)
+        logger.info "Loading from #{path}"
+        job_collection.load_from_path(path, true)
+        true
+      else
+        false
+      end
+    end
+
+    def download_yaml(url, file, remote_opts = {})
+      entries[url] = file
+      logger.info "Downloading #{url} to #{file}.tar"
+      open("#{file}.tar", 'w') do |local_file|
+        open(url, remote_opts) do |remote_file|
+          local_file.write(Zlib::GzipReader.new(remote_file).read)
+        end
+      end
+
+      # Extract Tar.gz to 'remote' folder
+      logger.info "Unpacking #{file}.tar to #{file} folder"
+      Archive::Tar::Minitar.unpack("#{file}.tar", file)
+    end
+
+    def load_templates(path, templates)
+      templates.each do |template|
+        version = template[:version] || 'newest'
+        logger.info "Loading #{template[:name]} at version #{version}"
+        # Move into the remote folder and look for the template folder
+        remote = Dir.entries(path)
+        if remote.include? template[:name]
+          # We found the template name, load this path
+          logger.info 'We found the template!'
+          load_template(path, template)
+        else
+          # Many cases we must dig one layer deep
+          remote.each do |file|
+            load_template(File.join(path, file), template)
+          end
+        end
+      end
+    end
+  end
+end
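RemoteDependencies (added above) downloads a gzipped tarball of shared templates, unpacks it, and loads it back into the owning JobCollection. A sketch of the structure its load method reads, based only on the keys used above; the URL and template name are placeholders:

    deps = [
      { source: { url: 'https://example.com/templates.tar.gz',           # placeholder URL of a .tar.gz archive
                  verify_ssl: false,                                      # optional: skip SSL verification for the download
                  templates: [{ name: 'shared', version: 'newest' }] } }  # placeholder template name
    ]
    JenkinsPipelineBuilder::JobCollection.new.remote_dependencies.load deps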
@@ -21,5 +21,5 @@
 #
 
 module JenkinsPipelineBuilder
-  VERSION = '0.10.11'
+  VERSION = '0.10.12'
 end
@@ -67,12 +67,12 @@ module JenkinsPipelineBuilder
     def create(params)
       # Name is a required parameter. Raise an error if not specified
       fail ArgumentError, 'Name is required for creating view' unless params.is_a?(Hash) && params[:name]
-      clean_up_views(params) unless @generator.debug
+      clean_up_views(params) unless JenkinsPipelineBuilder.debug
       params[:type] ||= 'listview'
       create_base_view(params[:name], params[:type], params[:parent_view])
       @logger.debug "Creating a #{params[:type]} view with params: #{params.inspect}"
 
-      if @generator.debug
+      if JenkinsPipelineBuilder.debug
         # pp post_params(params)
         return
       end
@@ -156,7 +156,7 @@ module JenkinsPipelineBuilder
         }.to_json
       }
 
-      if @generator.debug
+      if JenkinsPipelineBuilder.debug
        # pp initial_post_params
        return
      end
@@ -2,17 +2,6 @@ require File.expand_path('../../spec_helper', __FILE__)
 
 describe JenkinsPipelineBuilder::CLI::Helper do
   context '#setup' do
-    let(:generator) do
-      instance_double(
-        JenkinsPipelineBuilder::Generator,
-        :debug= => true
-      )
-    end
-
-    before(:each) do
-      allow(JenkinsPipelineBuilder).to receive(:generator).and_return(generator)
-    end
-
     context 'username and password given' do
       let(:options) do
         {
@@ -92,6 +81,7 @@ describe JenkinsPipelineBuilder::CLI::Helper do
           server_ip: :baz
         }
         expect(JenkinsPipelineBuilder).to receive(:credentials=).with(expected_options)
+        expect(JenkinsPipelineBuilder).to receive(:debug!).and_return true
        described_class.setup(options)
      end
 
@@ -1,11 +1,5 @@
 require File.expand_path('../spec_helper', __FILE__)
 
-def cleanup_compiled_xml(job_name)
-  Dir["#{@generator.out_dir}/#{job_name}*.xml"].each do |file|
-    File.delete(file)
-  end
-end
-
 describe JenkinsPipelineBuilder::Generator do
   after :each do
     JenkinsPipelineBuilder.registry.clear_versions
@@ -23,14 +17,13 @@ describe JenkinsPipelineBuilder::Generator do
   end
 
   after(:each) do
-    @generator.debug = false
-    @generator.job_collection = {}
-    @generator.remote_depends = {}
+    JenkinsPipelineBuilder.no_debug!
+    @generator.job_collection = JenkinsPipelineBuilder::JobCollection.new
   end
 
   describe 'initialized in before(:example)' do
     it 'creates a new generator' do
-      expect(@generator.job_collection).to be_empty
+      expect(@generator.job_collection.collection).to be_empty
       expect(@generator.module_registry).not_to be_nil
     end
 
@@ -38,23 +31,9 @@ describe JenkinsPipelineBuilder::Generator do
       job_name = 'sample_job'
       job_type = 'project'
       job_value = {}
-      @generator.job_collection[job_name] = { name: job_name, type: job_type, value: job_value }
-      expect(@generator.job_collection).not_to be_empty
-      expect(@generator.job_collection[job_name]).not_to be_nil
-    end
-  end
-
-  describe '#debug=' do
-    it 'sets debug mode to false' do
-      @generator.debug = false
-      expect(@generator.debug).to be false
-      expect(@generator.logger.level).to eq(Logger::INFO)
-    end
-
-    it 'sets debug mode to true' do
-      @generator.debug = true
-      expect(@generator.debug).to be true
-      expect(@generator.logger.level).to eq(Logger::DEBUG)
+      @generator.job_collection.collection[job_name] = { name: job_name, type: job_type, value: job_value }
+      expect(@generator.job_collection.collection).not_to be_empty
+      expect(@generator.job_collection.collection[job_name]).not_to be_nil
     end
   end
 
@@ -79,9 +58,8 @@ describe JenkinsPipelineBuilder::Generator do
   end
 
   def bootstrap(fixture_path, job_name)
-    @generator.debug = true
+    JenkinsPipelineBuilder.debug!
     errors = @generator.bootstrap(fixture_path, job_name)
-    cleanup_compiled_xml(job_name)
     errors
   end
 
@@ -114,7 +92,7 @@ describe JenkinsPipelineBuilder::Generator do
     it 'overrides the remote dependencies with local ones' do
       errors = bootstrap(fixture_path('local_override/remote_and_local'), 'TemplatePipeline')
       expect(errors).to be_empty
-      expect(@generator.job_collection['{{name}}-10'][:value][:description]).to eq('Overridden stuff')
+      expect(@generator.job_collection.collection['{{name}}-10'][:value][:description]).to eq('Overridden stuff')
     end
 
     it 'fails to override when there are duplicate local items' do
@@ -133,7 +111,7 @@ describe JenkinsPipelineBuilder::Generator do
     before :each do
       allow(JenkinsPipelineBuilder.client).to receive(:plugin).and_return double(
         list_installed: { 'description' => '20.0', 'git' => '20.0' })
-      @generator.debug = true
+      JenkinsPipelineBuilder.debug!
     end
     let(:jobs) do
       {
@@ -174,7 +152,6 @@ describe JenkinsPipelineBuilder::Generator do
       )
       success = @generator.pull_request(path, job_name)
       expect(success).to be_truthy
-      cleanup_compiled_xml(job_name)
     end
 
     it 'correclty creates jobs when there are multiple pulls open' do
@@ -196,18 +173,21 @@ describe JenkinsPipelineBuilder::Generator do
       expect(JenkinsPipelineBuilder::PullRequestGenerator).to receive(:new).once.and_return(
         double(purge: purge, create: create, jobs: jobs)
       )
-      expect(@generator).to receive(:compile_job_to_xml).once.with(
+      job1 = double name: 'job name'
+      job2 = double name: 'job name'
+      expect(JenkinsPipelineBuilder::Job).to receive(:new).once.with(
         name: 'PullRequest-PR1-10-SampleJob', scm_branch: 'origin/pr/1/head', scm_params: {
           refspec: 'refs/pull/*:refs/remotes/origin/pr/*'
         }
-      )
-      expect(@generator).to receive(:compile_job_to_xml).once.with(
+      ).and_return job1
+      expect(JenkinsPipelineBuilder::Job).to receive(:new).once.with(
         name: 'PullRequest-PR2-10-SampleJob', scm_branch: 'origin/pr/2/head', scm_params: {
           refspec: 'refs/pull/*:refs/remotes/origin/pr/*'
         }
-      )
+      ).and_return job2
+      expect(job1).to receive(:create_or_update).and_return true
+      expect(job2).to receive(:create_or_update).and_return true
       expect(@generator.pull_request(path, job_name)).to be_truthy
-      cleanup_compiled_xml(job_name)
     end
     # Things to check for
     # Fail - no PR job type found
@@ -226,29 +206,28 @@ describe JenkinsPipelineBuilder::Generator do
       }]
     end
 
+    before :each do
+      expect(@generator.job_collection).to receive(:load_file).once.with(view_hash, false).and_return(true)
+      expect(@generator.job_collection).to receive(:load_file).once.with(project_hash, false).and_return(true)
+    end
+
     it 'loads a yaml collection from a path' do
       path = File.expand_path('../fixtures/generator_tests/test_yaml_files', __FILE__)
-      expect(@generator).to receive(:load_job_collection).once.with(view_hash, false).and_return(true)
-      expect(@generator).to receive(:load_job_collection).once.with(project_hash, false).and_return(true)
-      @generator.send(:load_collection_from_path, path)
+      @generator.job_collection.send(:load_from_path, path)
     end
     it 'loads a json collection from a path' do
       path = File.expand_path('../fixtures/generator_tests/test_json_files', __FILE__)
-      expect(@generator).to receive(:load_job_collection).once.with(view_hash, false).and_return(true)
-      expect(@generator).to receive(:load_job_collection).once.with(project_hash, false).and_return(true)
-      @generator.send(:load_collection_from_path, path)
+      @generator.job_collection.send(:load_from_path, path)
     end
     it 'loads both yaml and json files from a path' do
       path = File.expand_path('../fixtures/generator_tests/test_combo_files', __FILE__)
-      expect(@generator).to receive(:load_job_collection).once.with(view_hash, false).and_return(true)
-      expect(@generator).to receive(:load_job_collection).once.with(project_hash, false).and_return(true)
-      @generator.send(:load_collection_from_path, path)
+      @generator.job_collection.send(:load_from_path, path)
     end
   end
 
   describe '#dump' do
     it "writes a job's config XML to a file" do
-      @generator.debug = true
+      JenkinsPipelineBuilder.debug!
       job_name = 'test_job'
       body = ''
       test_path = File.expand_path('../fixtures/generator_tests', __FILE__)