jenkins_pipeline_builder 0.10.11 → 0.10.12

Sign up to get free protection for your applications and to get access to all the features.
@@ -26,7 +26,7 @@ require 'json'
26
26
  module JenkinsPipelineBuilder
27
27
  class Generator
28
28
  attr_reader :debug
29
- attr_accessor :no_files, :job_templates, :job_collection, :logger, :module_registry, :remote_depends
29
+ attr_accessor :no_files, :job_templates, :logger, :module_registry, :job_collection
30
30
 
31
31
  # Initialize a Client object with Jenkins Api Client
32
32
  #
@@ -40,15 +40,9 @@ module JenkinsPipelineBuilder
40
40
  #
41
41
  def initialize
42
42
  @job_templates = {}
43
- @job_collection = {}
44
43
  @extensions = {}
45
- @remote_depends = {}
46
44
  @module_registry = ModuleRegistry.new
47
- end
48
-
49
- def debug=(value)
50
- @debug = value
51
- logger.level = (value) ? Logger::DEBUG : Logger::INFO
45
+ @job_collection = JobCollection.new
52
46
  end
53
47
 
54
48
  def logger
@@ -69,13 +63,12 @@ module JenkinsPipelineBuilder
69
63
 
70
64
  def bootstrap(path, project_name = nil)
71
65
  logger.info "Bootstrapping pipeline from path #{path}"
72
- load_collection_from_path(path)
73
- cleanup_temp_remote
66
+ job_collection.load_from_path(path)
74
67
  errors = {}
75
- if projects.any?
68
+ if job_collection.projects.any?
76
69
  errors = publish_project(project_name)
77
70
  else
78
- errors = publish_jobs(standalone jobs)
71
+ errors = publish_jobs(standalone job_collection.jobs)
79
72
  end
80
73
  errors.each do |k, v|
81
74
  logger.error "Encountered errors compiling: #{k}:"
@@ -85,13 +78,11 @@ module JenkinsPipelineBuilder
85
78
  end
86
79
 
87
80
  def pull_request(path, project_name)
88
- failed = false
89
81
  logger.info "Pull Request Generator Running from path #{path}"
90
- load_collection_from_path(path)
91
- cleanup_temp_remote
92
- logger.info "Project: #{projects}"
82
+ job_collection.load_from_path(path)
83
+ logger.info "Project: #{job_collection.projects}"
93
84
  errors = {}
94
- projects.each do |project|
85
+ job_collection.projects.each do |project|
95
86
  next unless project[:name] == project_name || project_name.nil?
96
87
  logger.info "Using Project #{project}"
97
88
  pull_job = find_pull_request_generator(project)
@@ -102,21 +93,21 @@ module JenkinsPipelineBuilder
102
93
  end
103
94
  jobs = filter_pull_request_jobs(pull_job)
104
95
  pull = JenkinsPipelineBuilder::PullRequestGenerator.new(project, jobs, p_payload)
105
- @job_collection.merge! pull.jobs
106
- success = create_pull_request_jobs(pull)
107
- failed = success unless success
96
+ @job_collection.collection.merge! pull.jobs
97
+ pull_errors = create_pull_request_jobs(pull)
98
+ errors.merge! pull_errors
108
99
  purge_pull_request_jobs(pull)
109
100
  end
110
101
  errors.each do |k, v|
111
102
  logger.error "Encountered errors compiling: #{k}:"
112
103
  logger.error v
113
104
  end
114
- !failed
105
+ errors.empty?
115
106
  end
116
107
 
117
108
  def file(path, project_name)
118
109
  logger.info "Generating files from path #{path}"
119
- @file_mode = true
110
+ JenkinsPipelineBuilder.file_mode!
120
111
  bootstrap(path, project_name)
121
112
  end
122
113
 
@@ -127,24 +118,12 @@ module JenkinsPipelineBuilder
127
118
  File.open(job_name + '.xml', 'w') { |f| f.write xml }
128
119
  end
129
120
 
130
- def out_dir
131
- 'out/xml'
132
- end
133
-
134
121
  #
135
122
  # BEGIN PRIVATE METHODS
136
123
  #
137
124
 
138
125
  private
139
126
 
140
- def create_or_update_job(job_name, xml)
141
- if client.job.exists?(job_name)
142
- client.job.update(job_name, xml)
143
- else
144
- client.job.create(job_name, xml)
145
- end
146
- end
147
-
148
127
  # Converts standalone jobs to the format that they have when loaded as part of a project.
149
128
  # This addresses an issue where #publish_jobs assumes that each job will be wrapped
150
129
  # within a hash and referenced under a key called :result, which is what happens when
@@ -166,16 +145,17 @@ module JenkinsPipelineBuilder
166
145
  end
167
146
 
168
147
  def create_pull_request_jobs(pull)
169
- success = false
148
+ errors = {}
170
149
  pull.create.each do |pull_project|
171
150
  success, compiled_project = resolve_project(pull_project)
172
151
  compiled_project[:value][:jobs].each do |i|
173
152
  job = i[:result]
174
- success, payload = compile_job_to_xml(job)
175
- create_or_update(job, payload) if success
153
+ job = Job.new job
154
+ success, payload = job.create_or_update
155
+ errors[job.name] = payload unless success
176
156
  end
177
157
  end
178
- success
158
+ errors
179
159
  end
180
160
 
181
161
  def find_pull_request_generator(project)
@@ -183,7 +163,7 @@ module JenkinsPipelineBuilder
183
163
  pull_job = nil
184
164
  project_jobs.each do |job|
185
165
  job = job.keys.first if job.is_a? Hash
186
- job = @job_collection[job.to_s]
166
+ job = @job_collection.collection[job.to_s]
187
167
  pull_job = job if job[:value][:job_type] == 'pull_request_generator'
188
168
  end
189
169
  fail 'No jobs of type pull_request_generator found' unless pull_job
@@ -195,9 +175,9 @@ module JenkinsPipelineBuilder
195
175
  pull_jobs = pull_job[:value][:jobs] || []
196
176
  pull_jobs.each do |job|
197
177
  if job.is_a? String
198
- jobs[job.to_s] = @job_collection[job.to_s]
178
+ jobs[job.to_s] = @job_collection.collection[job.to_s]
199
179
  else
200
- jobs[job.keys.first.to_s] = @job_collection[job.keys.first.to_s]
180
+ jobs[job.keys.first.to_s] = @job_collection.collection[job.keys.first.to_s]
201
181
  end
202
182
  end
203
183
  fail 'No jobs found for pull request' if jobs.empty?
@@ -205,179 +185,12 @@ module JenkinsPipelineBuilder
205
185
  end
206
186
 
207
187
  def compile_pull_request_generator(pull_job, project)
208
- defaults = find_defaults
188
+ defaults = job_collection.defaults
209
189
  settings = defaults.nil? ? {} : defaults[:value] || {}
210
190
  settings = Compiler.get_settings_bag(project, settings)
211
191
  resolve_job_by_name(pull_job, settings)
212
192
  end
213
193
 
214
- def load_collection_from_path(path, remote = false)
215
- load_extensions(path)
216
- path = File.expand_path(path, Dir.getwd)
217
- if File.directory?(path)
218
- logger.info "Generating from folder #{path}"
219
- Dir[File.join(path, '/*.{yaml,yml}')].each do |file|
220
- logger.info "Loading file #{file}"
221
- yaml = YAML.load_file(file)
222
- load_job_collection(yaml, remote)
223
- end
224
- Dir[File.join(path, '/*.json')].each do |file|
225
- logger.info "Loading file #{file}"
226
- json = JSON.parse(IO.read(file))
227
- load_job_collection(json, remote)
228
- end
229
- else
230
- logger.info "Loading file #{path}"
231
- if path.end_with? 'json'
232
- hash = JSON.parse(IO.read(path))
233
- else # elsif path.end_with?("yml") || path.end_with?("yaml")
234
- hash = YAML.load_file(path)
235
- end
236
- load_job_collection(hash, remote)
237
- end
238
- end
239
-
240
- def load_job_collection(yaml, remote = false)
241
- yaml.each do |section|
242
- Utils.symbolize_keys_deep!(section)
243
- key = section.keys.first
244
- value = section[key]
245
- if key == :dependencies
246
- logger.info 'Resolving Dependencies for remote project'
247
- load_remote_files(value)
248
- next
249
- end
250
- name = value[:name]
251
- if @job_collection.key?(name)
252
- existing_remote = @job_collection[name.to_s][:remote]
253
- # skip if the existing item is local and the new item is remote
254
- if remote && !existing_remote
255
- next
256
- # override if the existing item is remote and the new is local
257
- elsif existing_remote && !remote
258
- logger.info "Duplicate item with name '#{name}' was detected from the remote folder."
259
- else
260
- fail "Duplicate item with name '#{name}' was detected."
261
- end
262
- end
263
- @job_collection[name.to_s] = { name: name.to_s, type: key, value: value, remote: remote }
264
- end
265
- end
266
-
267
- def get_item(name)
268
- @job_collection[name.to_s]
269
- end
270
-
271
- def load_extensions(path)
272
- path = "#{path}/extensions"
273
- path = File.expand_path(path, Dir.getwd)
274
- return unless File.directory?(path)
275
- logger.info "Loading extensions from folder #{path}"
276
- logger.info Dir.glob("#{path}/*.rb").inspect
277
- Dir.glob("#{path}/*.rb").each do |file|
278
- logger.info "Loaded #{file}"
279
- require file
280
- end
281
- end
282
-
283
- def load_template(path, template)
284
- # If we specify what folder the yaml is in, load that
285
- if template[:folder]
286
- path = File.join(path, template[:folder])
287
- else
288
- path = File.join(path, template[:name]) unless template[:name] == 'default'
289
- # If we are looking for the newest version or no version was set
290
- if (template[:version].nil? || template[:version] == 'newest') && File.directory?(path)
291
- folders = Dir.entries(path)
292
- highest = folders.max
293
- template[:version] = highest unless highest == 0
294
- end
295
- path = File.join(path, template[:version]) unless template[:version].nil?
296
- path = File.join(path, 'pipeline')
297
- end
298
-
299
- if File.directory?(path)
300
- logger.info "Loading from #{path}"
301
- load_collection_from_path(path, true)
302
- true
303
- else
304
- false
305
- end
306
- end
307
-
308
- def download_yaml(url, file, remote_opts = {})
309
- @remote_depends[url] = file
310
- logger.info "Downloading #{url} to #{file}.tar"
311
- open("#{file}.tar", 'w') do |local_file|
312
- open(url, remote_opts) do |remote_file|
313
- local_file.write(Zlib::GzipReader.new(remote_file).read)
314
- end
315
- end
316
-
317
- # Extract Tar.gz to 'remote' folder
318
- logger.info "Unpacking #{file}.tar to #{file} folder"
319
- Archive::Tar::Minitar.unpack("#{file}.tar", file)
320
- end
321
-
322
- def load_remote_files(dependencies)
323
- ### Load remote YAML
324
- # Download Tar.gz
325
- dependencies.each do |source|
326
- source = source[:source]
327
- url = source[:url]
328
-
329
- file = "remote-#{@remote_depends.length}"
330
- if @remote_depends[url]
331
- file = @remote_depends[url]
332
- else
333
- opts = {}
334
- opts = { ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE } if source[:verify_ssl] == false
335
- download_yaml(url, file, opts)
336
- end
337
-
338
- path = File.expand_path(file, Dir.getwd)
339
- # Load templates recursively
340
- unless source[:templates]
341
- logger.info 'No specific template specified'
342
- # Try to load the folder or the pipeline folder
343
- path = File.join(path, 'pipeline') if Dir.entries(path).include? 'pipeline'
344
- return load_collection_from_path(path, true)
345
- end
346
-
347
- load_templates(path, source[:templates])
348
- end
349
- end
350
-
351
- def load_templates(path, templates)
352
- templates.each do |template|
353
- version = template[:version] || 'newest'
354
- logger.info "Loading #{template[:name]} at version #{version}"
355
- # Move into the remote folder and look for the template folder
356
- remote = Dir.entries(path)
357
- if remote.include? template[:name]
358
- # We found the template name, load this path
359
- logger.info 'We found the template!'
360
- load_template(path, template)
361
- else
362
- # Many cases we must dig one layer deep
363
- remote.each do |file|
364
- load_template(File.join(path, file), template)
365
- end
366
- end
367
- end
368
- end
369
-
370
- def cleanup_temp_remote
371
- @remote_depends.each_value do |file|
372
- FileUtils.rm_r file
373
- FileUtils.rm_r "#{file}.tar"
374
- end
375
- end
376
-
377
- def list_plugins
378
- client.plugin.list_installed
379
- end
380
-
381
194
  def prepare_jobs(jobs)
382
195
  jobs.map! do |job|
383
196
  job.is_a?(String) ? { job.to_sym => {} } : job
@@ -387,7 +200,7 @@ module JenkinsPipelineBuilder
387
200
  def process_job_changes(jobs)
388
201
  jobs.each do |job|
389
202
  job_id = job.keys.first
390
- j = get_item(job_id)
203
+ j = job_collection.get_item(job_id)
391
204
 
392
205
  next unless j
393
206
 
@@ -414,22 +227,8 @@ module JenkinsPipelineBuilder
414
227
  errors
415
228
  end
416
229
 
417
- def process_jobs(jobs, project, errors = {})
418
- jobs.each do |job|
419
- job_id = job.keys.first
420
- settings = project[:settings].clone.merge(job[job_id])
421
- success, payload = resolve_job_by_name(job_id, settings)
422
- if success
423
- job[:result] = payload
424
- else
425
- errors[job_id] = payload
426
- end
427
- end
428
- errors
429
- end
430
-
431
230
  def resolve_project(project)
432
- defaults = find_defaults
231
+ defaults = job_collection.defaults
433
232
  settings = defaults.nil? ? {} : defaults[:value] || {}
434
233
  project[:settings] = Compiler.get_settings_bag(project, settings) unless project[:settings]
435
234
  project_body = project[:value]
@@ -451,41 +250,31 @@ module JenkinsPipelineBuilder
451
250
  [true, project]
452
251
  end
453
252
 
454
- def find_defaults
455
- @job_collection.each_value do |item|
456
- return item if item[:type] == 'defaults' || item[:type] == :defaults
253
+ def process_jobs(jobs, project, errors = {})
254
+ jobs.each do |job|
255
+ job_id = job.keys.first
256
+ settings = project[:settings].clone.merge(job[job_id])
257
+ success, payload = resolve_job_by_name(job_id, settings)
258
+ if success
259
+ job[:result] = payload
260
+ else
261
+ errors[job_id] = payload
262
+ end
457
263
  end
458
- # This is here for historical purposes
459
- get_item('global')
264
+ errors
460
265
  end
461
266
 
462
267
  def resolve_job_by_name(name, settings = {})
463
- job = get_item(name)
268
+ job = job_collection.get_item(name)
464
269
  fail "Failed to locate job by name '#{name}'" if job.nil?
465
270
  job_value = job[:value]
466
271
  logger.debug "Compiling job #{name}"
467
- success, payload = Compiler.compile(job_value, settings, @job_collection)
272
+ success, payload = Compiler.compile(job_value, settings, @job_collection.collection)
468
273
  [success, payload]
469
274
  end
470
275
 
471
- def projects
472
- result = []
473
- @job_collection.values.each do |item|
474
- result << item if item[:type] == :project
475
- end
476
- result
477
- end
478
-
479
- def jobs
480
- result = []
481
- @job_collection.values.each do |item|
482
- result << item if item[:type] == :job
483
- end
484
- result
485
- end
486
-
487
276
  def publish_project(project_name, errors = {})
488
- projects.each do |project|
277
+ job_collection.projects.each do |project|
489
278
  next unless project_name.nil? || project[:name] == project_name
490
279
  success, payload = resolve_project(project)
491
280
  if success
@@ -510,129 +299,11 @@ module JenkinsPipelineBuilder
510
299
  logger.info "Processing #{i}"
511
300
  job = i[:result]
512
301
  fail "Result is empty for #{i}" if job.nil?
513
- success, payload = compile_job_to_xml(job)
514
- if success
515
- create_or_update(job, payload)
516
- else
517
- errors[job[:name]] = payload
518
- end
302
+ job = Job.new job
303
+ success, payload = job.create_or_update
304
+ errors[job.name] = payload unless success
519
305
  end
520
306
  errors
521
307
  end
522
-
523
- def create_or_update(job, xml)
524
- job_name = job[:name]
525
- if @debug || @file_mode
526
- logger.info "Will create job #{job}"
527
- logger.info "#{xml}" if @debug
528
- FileUtils.mkdir_p(out_dir) unless File.exist?(out_dir)
529
- File.open("#{out_dir}/#{job_name}.xml", 'w') { |f| f.write xml }
530
- return
531
- end
532
-
533
- create_or_update_job job_name, xml
534
- end
535
-
536
- def compile_job_to_xml(job)
537
- fail 'Job name is not specified' unless job[:name]
538
-
539
- logger.info "Creating Yaml Job #{job}"
540
- job[:job_type] = 'free_style' unless job[:job_type]
541
- case job[:job_type]
542
- when 'job_dsl'
543
- xml = compile_freestyle_job_to_xml(job)
544
- payload = update_job_dsl(job, xml)
545
- when 'multi_project'
546
- xml = compile_freestyle_job_to_xml(job)
547
- payload = adjust_multi_project xml
548
- when 'build_flow'
549
- xml = compile_freestyle_job_to_xml(job)
550
- payload = add_job_dsl(job, xml)
551
- when 'free_style', 'pull_request_generator'
552
- payload = compile_freestyle_job_to_xml job
553
- else
554
- return false, "Job type: #{job[:job_type]} is not one of job_dsl, multi_project, build_flow or free_style"
555
- end
556
-
557
- [true, payload]
558
- end
559
-
560
- def adjust_multi_project(xml)
561
- n_xml = Nokogiri::XML(xml)
562
- root = n_xml.root
563
- root.name = 'com.tikal.jenkins.plugins.multijob.MultiJobProject'
564
- n_xml.to_xml
565
- end
566
-
567
- def compile_freestyle_job_to_xml(params)
568
- if params.key?(:template)
569
- template_name = params[:template]
570
- fail "Job template '#{template_name}' can't be resolved." unless @job_templates.key?(template_name)
571
- params.delete(:template)
572
- template = @job_templates[template_name]
573
- puts "Template found: #{template}"
574
- params = template.deep_merge(params)
575
- puts "Template merged: #{template}"
576
- end
577
-
578
- xml = client.job.build_freestyle_config(params)
579
- n_xml = Nokogiri::XML(xml, &:noblanks)
580
-
581
- logger.debug 'Loading the required modules'
582
- @module_registry.traverse_registry_path('job', params, n_xml)
583
- logger.debug 'Module loading complete'
584
-
585
- n_xml.to_xml
586
- end
587
-
588
- def add_job_dsl(job, xml)
589
- n_xml = Nokogiri::XML(xml)
590
- n_xml.root.name = 'com.cloudbees.plugins.flow.BuildFlow'
591
- Nokogiri::XML::Builder.with(n_xml.root) do |b_xml|
592
- b_xml.dsl job[:build_flow]
593
- end
594
- n_xml.to_xml
595
- end
596
-
597
- # TODO: make sure this is tested
598
- def update_job_dsl(job, xml)
599
- n_xml = Nokogiri::XML(xml)
600
- n_builders = n_xml.xpath('//builders').first
601
- Nokogiri::XML::Builder.with(n_builders) do |b_xml|
602
- build_job_dsl(job, b_xml)
603
- end
604
- n_xml.to_xml
605
- end
606
-
607
- def generate_job_dsl_body(params)
608
- logger.info 'Generating pipeline'
609
-
610
- xml = client.job.build_freestyle_config(params)
611
-
612
- n_xml = Nokogiri::XML(xml)
613
- if n_xml.xpath('//javaposse.jobdsl.plugin.ExecuteDslScripts').empty?
614
- p_xml = Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |b_xml|
615
- build_job_dsl(params, b_xml)
616
- end
617
-
618
- n_xml.xpath('//builders').first.add_child("\r\n" + p_xml.doc.root.to_xml(indent: 4) + "\r\n")
619
- xml = n_xml.to_xml
620
- end
621
- xml
622
- end
623
-
624
- def build_job_dsl(job, xml)
625
- xml.send('javaposse.jobdsl.plugin.ExecuteDslScripts') do
626
- if job.key?(:job_dsl)
627
- xml.scriptText job[:job_dsl]
628
- xml.usingScriptText true
629
- else
630
- xml.targets job[:job_dsl_targets]
631
- xml.usingScriptText false
632
- end
633
- xml.ignoreExisting false
634
- xml.removedJobAction 'IGNORE'
635
- end
636
- end
637
308
  end
638
309
  end