manifest-builder 0.4.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +2 -2
- data/lib/cicd/builder/manifest/mixlib/build.rb +304 -318
- data/lib/cicd/builder/manifest/mixlib/repo/artifactory.rb +163 -0
- data/lib/cicd/builder/manifest/mixlib/repo.rb +44 -0
- data/lib/cicd/builder/manifest/version.rb +1 -1
- data/lib/cicd/builder/manifest.rb +6 -2
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 17e7567ae1c78580e2ecc9f74f593764ff2dab7f
+data.tar.gz: db1ca34f46fc756802efbc861ae6666345328109
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 70b1e865b1d772c0077e2af4291bc4131f3199dd3792c0dafb540b5b96743d496abb54e40745e93cdc0e9709a1474fd1eb40760d30b8d0174ea25625f537ff82
+data.tar.gz: 7fb9a46b85de867913b9f22766b124e18f99b7b497a5e1a75d69c82b6fb9425fbc3a184178943b99f1655ec9c3c22e37adf0e1a43354d9971b27cf3c5c764326
data/Gemfile.lock
CHANGED
@@ -1,7 +1,7 @@
 PATH
 remote: .
 specs:
-manifest-builder (0.4.0)
+manifest-builder (0.5.0)
 archive-tar-minitar (= 0.5.2)
 chefrepo-builder (>= 0.9.27, < 1.1)
 cicd-builder (>= 0.9.33, < 1.1)
@@ -18,7 +18,7 @@ PATH
 PATH
 remote: ../cicd-builder
 specs:
-cicd-builder (0.9.
+cicd-builder (0.9.35)
 artifactory (>= 2.2.1, < 2.3)
 awesome_print (>= 1.6, < 2.0)
 aws-sdk (>= 2.0, < 2.1)
data/lib/cicd/builder/manifest/mixlib/build.rb
CHANGED
@@ -5,376 +5,362 @@ module CiCd
 # noinspection RubySuperCallWithoutSuperclassInspection
 module Manifest
 module Build
-# module ClassMethods
-# ---------------------------------------------------------------------------------------------------------------
-def self.included(includer)
-end

-
-
-
-
-
-
-
-
-
-
-
-
-
+# ---------------------------------------------------------------------------------------------------------------
+# noinspection RubyHashKeysTypesInspection
+def prepareBuild()
+ret = super
+if ret == 0
+@vars[:artifacts] = []
+yaml = YAML.load(IO.read(ENV['MANIFEST_FILE']))
+keys = Hash[yaml.keys.map.with_index.to_a].keys.sort
+# @logger.info keys.ai
+ordr = []
+bads = []
+apps = {}
+vars = {}

-
-
-
-
-
-
-
+rmap = {
+sha256: %w[_sha256],
+base_url: %w[_repo_base_url],
+url: %w[_url],
+version: %w[_app_version],
+build: %w[_app_build],
+}

-
-
-
-
-
-
-
-
-
-
-
-
-
-end
+keys.each do |prod|
+rmap.keys.each do |var|
+vars[var] = ''
+end
+name = ''
+match = nil
+rmap.each do |var,lst|
+lst.each do |regexstr|
+match = prod.match(%r'^(.*?)#{regexstr}$')
+if match
+name = match[1]
+vars[var] = yaml[prod]
+break
 end
-break if match
-end
-if match
-ordr << name
-unless apps[name]
-apps[name] = { name: name, }
-end
-rmap.keys.each do |var|
-apps[name][var] = vars[var] unless vars[var].empty?
-end
-else
-bads << prod
 end
+break if match
 end
-
-
-
-
+if match
+ordr << name
+unless apps[name]
+apps[name] = { name: name, }
+end
+rmap.keys.each do |var|
+apps[name][var] = vars[var] unless vars[var].empty?
+end
 else
-
+bads << prod
 end
 end
-@
+@logger.debug "App entries: #{apps.ai}"
+if bads.size > 0
+@logger.fatal "Bad entries: #{bads.map{|p| "#{p}: #{yaml[p]}"}.ai}"
+ret = Errors::BAD_ARTIFACTS
+else
+@vars[:components] = apps
+end
 end
+@vars[:return_code] = ret
+end

-
-
-
-
-
-
-
-
-
-version
-
-# if artifact =~ %r'^#{comp[0]}'
-# version.gsub!(%r'^#{comp[0]}\.*-*','')
-# else
-# version.gsub!(%r'^[a-zA-Z\-._]+','')
-# end
-version.gsub!(%r'^[a-zA-Z\-._]+', '')
-
-# build = if version.match(VER_RGX)
-# if version.match(%r'^(\d+\.?){2,3}$')
-# 0
-# elsif version.match(%r'\-')
-# version,build = version.split(/-/)
-# build
-# else
-# 0
-# end
-# else
-# 0
-# end
-build = ''
-if version.match(VER_RGX)
-if version.match(%r'\-')
-version,build = version.split(/-/)
-end
-# else
-# match = version.match(%r'^(\d+)-(\d{4}-\d{2}-\d{2}[_]\d{2}-\d{2}-\d{2})\.(release|snapshot)$')
-# if match
-# build = match[1]
-# # version.gsub!(/^#{build}-/, '')
-# version = match[2]
-# end
+VER_RGX = %r'^\d+\.\d+(\.?\d)*$'
+MMP_RGX = %r'^(\d+\.?){2,3}$'
+# ---------------------------------------------------------------------------------------------------------------
+def getVersionBuildFromName(artifact)
+version = artifact.dup
+version.gsub!(%r'\.*(tar\.gz|tgz|bzip2|bz2|jar|war|[a-z]+)$', '')
+version.gsub!(%r'^[a-zA-Z\-._]+', '')
+build = ''
+if version.match(VER_RGX)
+if version.match(%r'\-')
+version,build = version.split(/-/)
 end
-[version,build]
 end
+[version,build]
+end

-
-
-
-
-
+# ---------------------------------------------------------------------------------------------------------------
+def getVersionBuild(path,artifact,comp)
+version,build = File.split(path)
+if build.match(%r'^\d+$') and version.match(%r'/?\d+\.\d+\.?\d*$') # Hole in one!
+version = File.basename(version)
+else
+if build.match(VER_RGX)
+version = build
+build = ''
 else
-
-
-
-
-
-
-
-
-end
-ver,bld = getVersionBuildFromName(artifact)
+version = comp[1][:build].nil? ? '' : ( comp[1][:build] > 0 ? build.to_s : '' )
+end
+unless version.match(VER_RGX)
+version = comp[1][:version] || ''
+end
+ver,bld = getVersionBuildFromName(artifact)
+if version.empty?
+version,build = [ver,bld]
 if version.empty?
-version
-if version.empty?
-version = @vars[:build_ver]
-else
-uri,ver = File.split(path)
-if version =~ %r'^#{ver}'
-if version =~ VER_RGX
-if version =~ %r'^#{build}' # prob the major part of version
-build = ''
-end
-else
-unless version.eql?(ver)
-build = version.dup
-version = ver
-build = build.gsub(%r'^#{version}(\.|-)*','')
-end
-end
-else
-build = version.dup
-version = ver
-build = build.gsub(%r'^#{version}(\.|-)*','')
-end
-end
+version = @vars[:build_ver]
 else
-
-
-
-
+_,ver = File.split(path)
+if version =~ %r'^#{ver}'
+if version =~ VER_RGX
+if version =~ %r'^#{build}' # prob the major part of version
+build = ''
 end
 else
-
-
-
+unless version.eql?(ver)
+build = version.dup
+version = ver
+build = build.gsub(%r'^#{version}(\.|-)*','')
+end
 end
+else
+build = version.dup
+version = ver
+build = build.gsub(%r'^#{version}(\.|-)*','')
 end
 end
-
-
-
+else
+if ver.match(VER_RGX)
+if ver.match(MMP_RGX)
+if version.length < ver.length
+version = ver # Guessing it is the better version
+end
+else
+build = ver.dup
+# version.gsub!(/\.d+$/, '')
+build.gsub!(/^#{version}\.?/, '')
+end
 end
 end
-[
+unless build.match(%r'^[1-9]\d*$')
+build = comp[1][:build]
+build = @vars[:build_num] if (build.nil? or build.empty? or build.to_i == 0)
+end
 end
+[version,build]
+end

-
-
-
-
-
-
+# ---------------------------------------------------------------------------------------------------------------
+def packageBuild()
+@logger.step __method__.to_s
+if isSameDirectory(Dir.pwd, ENV['WORKSPACE'])
+if @vars.has_key?(:components) and not @vars[:components].empty?
+@vars[:return_code] = 0

-
-
-
+clazz = getRepoClass('S3')
+if clazz.is_a?(Class) and not clazz.nil?
+@repo = clazz.new(self)

+if @vars[:return_code] == 0
+lines = []
+@vars[:artifacts] = []
+# Deal with all artifacts of each component
+@vars[:components].each { |comp|
+processComponent(comp, lines)
+}
 if @vars[:return_code] == 0
-lines
-@vars[:artifacts] = []
-# Deal with all artifacts of each component
-@vars[:components].each { |comp|
-artifact, path, version, build = parseComponent(comp)
-
-require 'uri'
-begin
-parts = URI(path).path.gsub(%r'^#{File::SEPARATOR}','').split(File::SEPARATOR)
-name = parts.shift
-bucket = getBucket(name)
-key = File.join(parts, '')
-@logger.info "S3://#{name}:#{key} URL: #{path} #{artifact}"
-objects = []
-bucket.objects(prefix: key).each do |object|
-if artifact.empty? or (not artifact.empty? and object.key =~ %r'#{key}#{artifact}')
-objects << object
-end
-end
-@logger.debug "S3://#{name}:#{key} has #{objects.size} objects"
-local_dir = File.join(@vars[:local_dirs]['artifacts'],comp[0], '')
-Dir.mkdir(local_dir, 0700) unless File.directory?(local_dir)
-artifacts = []
-changed = false
-# 1 or more objects on the key/ path
-if objects.size > 0
-lines << "#{comp[0]}:#{artifact} v#{version} b#{build} - #{path}"
-# When we start pulling the artifacts then everything that is build 0 get this build number, in fact all artifacts get this build number!
-objects.each do |object|
-@logger.info "\tchecking #{object.key}"
-local = File.join(local_dir,File.basename(object.key))
-etag = object.etag.gsub(%r/['"]/, '')
-download = if File.exists?(local)
-@logger.debug "\t\tchecking etag on #{local}"
-stat = File.stat(local)
-check = calcLocalETag(etag, local, stat.size)
-if etag != check or object.size != stat.size or object.last_modified > stat.mtime
-@logger.debug "\t\t#{etag} != \"#{check}\" #{object.size} != #{stat.size} #{object.last_modified} > #{stat.mtime}"
-true
-else
-@logger.debug "\t\tmatched #{etag}"
-false
-end
-else
-true
-end
-if download
-@logger.info "\t\tdownload #{object.size} bytes"
-response = object.get(:response_target => local)
-File.utime(response.last_modified, response.last_modified, local)
-@logger.info "\t\tdone"
-check = calcLocalETag(etag, local)
-unless check.eql?(etag)
-@logger.info "\tETag different: #{etag} != #{check}"
-changed = true
-end
-else
-@logger.info "\t\tunchanged"
-end
-artifacts << local
-end
-# The local file will be 1 artifact or an archive of the local artifacts when artifacts.size > 1
-local = if artifacts.size > 0
-if artifacts.size > 1
-begin
-# require 'zlib'
-# require 'archive/tar/minitar'
-file = File.join(local_dir, "#{comp[0]}-#{version}.zip")
-if changed or not File.exists?(file)
-# output = File.open(file, 'wb')
-# output = Zlib::GzipWriter.new(output, Zlib::BEST_COMPRESSION, Zlib::RLE)
-# Dir.chdir(local_dir) do
-# Archive::Tar::Minitar.pack(artifacts.map{|f| f.gsub(%r'^#{local_dir}','')}, output, false )
-# end
-zipped_files = artifacts.map{|f| f.gsub(%r'^#{local_dir}','')}.join(' ')
-Dir.chdir(local_dir) do
-res = %x(zip -o9X #{file} #{zipped_files})
-end
-raise "Failed to zip #{file} containting #{zipped_files}" unless $?.exitstatus == 0
-end
-file
-rescue Exception => e
-@logger.error "Artifact error: #{file} #{e.class.name} #{e.message}"
-File.unlink(file)
-raise e
-# ensure
-# output.close if output and not output.closed?
-end
-else
-artifacts[0]
-end
-else
-end
-addArtifact(@vars[:artifacts], local, local_dir, { module: comp[0], name: comp[0], build: build, version: version, file: local})
-else
-@logger.fatal "Artifact not found: s3://#{name}/#{key}#{artifact}"
-@vars[:return_code] = Errors::ARTIFACT_NOT_FOUND
-end
-# rescue Aws::S3::Errors::NotFound => e
-# @logger.fatal "Artifact S3 error: #{artifact} #{e.class.name} #{e.message}"
-# raise e
-# rescue Aws::S3::Errors::NoSuchKey => e
-# @logger.error "Artifact S3 error: #{artifact} #{e.class.name} #{e.message}"
-rescue Exception => e
-@logger.error "Artifact error: #{artifact} #{e.class.name} #{e.message}"
-raise e
-end
-}
-if @vars[:return_code] == 0
-cleanupAfterPackaging(lines)
-end
-
-else
-@logger.fatal "S3 repo error: Bucket #{ENV['AWS_S3_BUCKET']}"
+cleanupAfterPackaging(lines)
 end
+
 else
-@logger.
-@vars[:return_code] = Errors::BUILDER_REPO_TYPE
+@logger.fatal "S3 repo error: Bucket #{ENV['AWS_S3_BUCKET']}"
 end
 else
-@logger.error
-@vars[:return_code] = Errors::
+@logger.error "CiCd::Builder::Repo::#{type} is not a valid repo class"
+@vars[:return_code] = Errors::BUILDER_REPO_TYPE
 end
 else
-@logger.error
-@vars[:return_code] = Errors::
+@logger.error 'No components found during preparation?'
+@vars[:return_code] = Errors::NO_COMPONENTS
 end
+else
+@logger.error "Not in WORKSPACE? '#{pwd}' does not match WORKSPACE='#{workspace}'"
+@vars[:return_code] = Errors::WORKSPACE_DIR
+end
+
+@vars[:return_code]
+end

-
+# ---------------------------------------------------------------------------------------------------------------
+def cleanupAfterPackaging(lines)
+begin
+unless IO.write(@vars[:build_mff], lines.join("\n")) > 0
+@logger.error "Nothing was written to build manifest '#{@vars[:build_mff]}'"
+@vars[:return_code] = Errors::MANIFEST_EMPTY
+end
+rescue => e
+@logger.error "Failed to write manifest '#{@vars[:build_mff]}' (#{e.message})"
+@vars[:return_code] = Errors::MANIFEST_WRITE
+end
+FileUtils.rmtree(@vars[:build_dir])
+@vars[:return_code] = File.directory?(@vars[:build_dir]) ? Errors::BUILD_DIR : 0
+unless @vars[:return_code] == 0
+@logger.warn "Remove manifest '#{@vars[:build_mff]}' due to error"
+FileUtils.rm_f(@vars[:build_mff])
+# @vars[:return_code] = File.exists?(@vars[:build_mff]) ? Errors::MANIFEST_DELETE : 0
 end
+end

-
-
-
-
-
+private
+
+# ---------------------------------------------------------------------------------------------------------------
+def processComponent(comp, lines)
+artifact, path, version, build = parseComponent(comp)
+
+require 'uri'
+begin
+key, name, objects = getObjects(artifact, path)
+local_dir = File.join(@vars[:local_dirs]['artifacts'], comp[0], '')
+Dir.mkdir(local_dir, 0700) unless File.directory?(local_dir)
+artifacts = []
+changed = false
+# 1 or more objects on the key/ path
+if objects.size > 0
+lines << "#{comp[0]}:#{artifact} v#{version} b#{build} - #{path}"
+# When we start pulling the artifacts then everything that is build 0 get this build number, in fact all artifacts get this build number!
+objects.each do |object|
+@logger.info "\tchecking #{object.key}"
+local = File.join(local_dir, File.basename(object.key))
+etag = object.etag.gsub(%r/['"]/, '')
+download = shouldDownload?(etag, local, object)
+if download
+changed = doDownload(etag, local, object)
+else
+@logger.info "\t\tunchanged"
+end
+artifacts << local
+end
+# The local file will be 1 artifact or an archive of the local artifacts when artifacts.size > 1
+if artifacts.size > 0
+local = getLocalArtifact(artifacts, changed, comp, local_dir, version)
+addArtifact(@vars[:artifacts], local, local_dir, {module: comp[0], name: comp[0], build: build, version: version, file: local})
 end
-
-@logger.
-@vars[:return_code] = Errors::
+else
+@logger.fatal "Artifact not found: s3://#{name}/#{key}#{artifact}"
+@vars[:return_code] = Errors::ARTIFACT_NOT_FOUND
 end
-
-@
-
-
-
-
+rescue Exception => e
+@logger.error "Artifact error: #{artifact} #{e.class.name} #{e.message}"
+raise e
+end
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def getObjects(artifact, path)
+parts = URI(path).path.gsub(%r'^#{File::SEPARATOR}', '').split(File::SEPARATOR)
+name = parts.shift
+bucket = getBucket(name)
+key = File.join(parts, '')
+@logger.info "S3://#{name}:#{key} URL: #{path} #{artifact}"
+objects = []
+bucket.objects(prefix: key).each do |object|
+if artifact.empty? or (not artifact.empty? and object.key =~ %r'#{key}#{artifact}')
+objects << object
 end
 end
+@logger.debug "S3://#{name}:#{key} has #{objects.size} objects"
+return key, name, objects
+end

-
-
-
-
-
-
-
-
-
-
-
-
-
+# ---------------------------------------------------------------------------------------------------------------
+def doDownload(etag, local, object)
+@logger.info "\t\tdownload #{object.size} bytes"
+response = object.get(:response_target => local)
+File.utime(response.last_modified, response.last_modified, local)
+@logger.info "\t\tdone"
+check = calcLocalETag(etag, local)
+if check.eql?(etag)
+false
+else
+@logger.info "\tETag different: #{etag} != #{check}"
+true
+end
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def shouldDownload?(etag, local, object)
+if File.exists?(local)
+@logger.debug "\t\tchecking etag on #{local}"
+stat = File.stat(local)
+check = calcLocalETag(etag, local, stat.size)
+if etag != check or object.size != stat.size or object.last_modified > stat.mtime
+@logger.debug "\t\t#{etag} != \"#{check}\" #{object.size} != #{stat.size} #{object.last_modified} > #{stat.mtime}"
+true
 else
-
-
-version, build = getVersionBuild(path, artifact, comp)
+@logger.debug "\t\tmatched #{etag}"
+false
 end
-
+else
+true
 end
+end

-
-
-
-
-
-
-
+# ---------------------------------------------------------------------------------------------------------------
+def getLocalArtifact(artifacts, changed, comp, local_dir, version)
+if artifacts.size > 1
+begin
+file = File.join(local_dir, "#{comp[0]}-#{version}.zip")
+if changed or not File.exists?(file)
+zipped_files = artifacts.map { |f| f.gsub(%r'^#{local_dir}', '') }.join(' ')
+Dir.chdir(local_dir) do
+res = %x(zip -o9X #{file} #{zipped_files} 2>&1)
+@logger.info res
+end
+raise "Failed to zip #{file} containting #{zipped_files}" unless $?.exitstatus == 0
+end
+file
 rescue Exception => e
-@logger.error "
+@logger.error "Artifact error: #{file} #{e.class.name} #{e.message}"
+File.unlink(file)
 raise e
 end
-
+else
+artifacts[0]
+end
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def parseComponent(comp)
+if comp[1][:url]
+path, artifact = File.split(comp[1][:url])
+version, build = getVersionBuild(path, artifact, comp)
+elsif comp[1][:base_url]
+artifact = ''
+if comp[1][:build].nil?
+# noinspection RubyUnusedLocalVariable
+version, build = comp[1][:version].split(%r'-')
+# noinspection RubyUnusedLocalVariable
+path = File.join(comp[1][:base_url], comp[1][:version])
+else
+version, build = [comp[1][:version], comp[1][:build]]
+path = File.join(comp[1][:base_url], comp[1][:version], comp[1][:build])
+end
+else
+path = ''
+artifact = ''
+version, build = getVersionBuild(path, artifact, comp)
+end
+return artifact, path, version, build
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def getBucket(name = nil)
+@s3 = @repo.getS3()
+begin
+::Aws::S3::Bucket.new(name: name || ENV['AWS_S3_BUCKET'], client: @s3)
+rescue Aws::S3::Errors::NotFound
+@vars[:return_code] = Errors::BUCKET
+nil
+rescue Exception => e
+@logger.error "S3 Bucket resource API error: #{e.class.name} #{e.message}"
+raise e
 end
+end
 end
 end
 end
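Note on the new prepareBuild above: it reads ENV['MANIFEST_FILE'] as a YAML hash of flat key/value pairs, where each key is expected to end in one of the suffixes listed in rmap (_sha256, _repo_base_url, _url, _app_version, _app_build) and the prefix before the suffix becomes the component name. A minimal standalone sketch of that suffix matching, using a hypothetical component called myapp (the key names follow the rmap above; the component and its values are illustrative, not taken from the gem):

# Stand-in for YAML.load(IO.read(ENV['MANIFEST_FILE'])).
manifest = {
  'myapp_url'         => 'https://example-bucket.s3.amazonaws.com/myapp/1.2.3/4/myapp.tar.gz', # hypothetical
  'myapp_sha256'      => 'deadbeef',                                                           # hypothetical
  'myapp_app_version' => '1.2.3',
  'myapp_app_build'   => '4',
}

rmap = {
  sha256:   %w[_sha256],
  base_url: %w[_repo_base_url],
  url:      %w[_url],
  version:  %w[_app_version],
  build:    %w[_app_build],
}

apps = {}
manifest.each do |key, value|
  name = field = nil
  rmap.each do |var, suffixes|
    suffixes.each do |suffix|
      if (m = key.match(%r'^(.*?)#{suffix}$'))
        name, field = m[1], var
        break
      end
    end
    break if field
  end
  next unless field                                 # prepareBuild collects unmatched keys in bads instead
  (apps[name] ||= { name: name })[field] = value
end

apps
# => {"myapp"=>{:name=>"myapp", :url=>"https://...", :sha256=>"deadbeef", :version=>"1.2.3", :build=>"4"}}

Keys that match none of the suffixes land in bads and fail the build with Errors::BAD_ARTIFACTS; otherwise the resulting hash becomes @vars[:components] for packageBuild/parseComponent to consume.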
data/lib/cicd/builder/manifest/mixlib/repo/artifactory.rb
ADDED
@@ -0,0 +1,163 @@
+require 'artifactory'
+
+module CiCd
+module Builder
+# noinspection RubySuperCallWithoutSuperclassInspection
+module Manifest
+module Repo
+class Artifactory < CiCd::Builder::Repo::Artifactory
+# include ::Artifactory::Resource
+
+# ---------------------------------------------------------------------------------------------------------------
+def initialize(builder)
+super
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def uploadToRepo(artifacts)
+@manifest = {}
+super
+if @vars[:return_code] == 0
+# Preserve the manifest (maybeUploadArtifactoryObject will add everything we upload to the instance var)
+manifest = @manifest.dup
+# Create a manifest for each product and store it.
+createProductManifests(manifest)
+# Get a super manifest of all products and store as learning-manifest
+createSuperManifest(manifest) # -#{@vars[:variant]}
+@manifest = manifest
+end
+# If we are asked to produce a PROJECTS_FILE then we do that from the manifest and components.
+unless ENV['PROJECTS_FILE'].nil?
+if @vars[:return_code] == 0
+if File.directory?(File.realdirpath(File.dirname(ENV['PROJECTS_FILE'])))
+createProjectsFile
+else
+@logger.error "The path to the PROJECTS_FILE (#{File.dirname(ENV['PROJECTS_FILE'])}) does not exist!"
+@vars[:return_code] = Errors::NO_PROJECTS_PATH
+end
+end
+end
+@vars[:return_code]
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def createProjectsFile
+projects = {}
+project_names = {}
+unless ENV['PROJECT_NAMES'].nil?
+if File.exists?(ENV['PROJECT_NAMES'])
+project_names = JSON.load(IO.read(ENV['PROJECT_NAMES']))
+else
+@logger.error "The PROJECT_NAMES file (#{ENV['PROJECT_NAMES']}) does not exist!"
+@vars[:return_code] = Errors::NO_PROJECT_NAMES
+end
+end
+exts = Hash[@vars[:artifacts].map { |a| [a[:data][:name], File.extname(File.basename(a[:data][:file])).gsub(%r'^\.', '')] }]
+@manifest.each do |mod, man|
+man.each do |prod, build|
+projects[prod] = {
+name: project_names[prod] || prod,
+module: mod,
+ext: exts[prod],
+class_filter: '',
+}
+end
+end
+IO.write(ENV['PROJECTS_FILE'], JSON.pretty_generate(projects, {indent: "\t", space: ' '}))
+end
+
+def createSuperManifest(manifest)
+manifest_data = ''
+manifest.each do |mod, man|
+man.each do |k, v|
+manifest_data += "#{k}=#{v}\n"
+end
+end
+amn = artifactory_manifest_name # Just using a local iso invoking method_missing repeatedly ... ;)
+data = {module: amn, data: manifest_data, version: @vars[:build_ver], build: @vars[:build_num], properties: @properties_matrix}
+tempArtifactFile(amn, data)
+data[:sha1] = Digest::SHA1.file(data[:file]).hexdigest
+data[:md5] = Digest::MD5.file(data[:file]).hexdigest
+data[:name] = amn
+maybeUploadArtifactoryObject(data: data, artifact_module: amn, artifact_version: data[:version] || @vars[:version], file_name: '', file_ext: 'properties')
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def createProductManifests(manifest)
+manifest.each do |mod, man|
+manifest_data = ''
+man.each do |k, v|
+manifest_data += "#{k}=#{v}\n"
+end
+data = {module: mod, data: manifest_data, version: @vars[:build_ver], build: @vars[:build_num], properties: @properties_matrix}
+tempArtifactFile("#{mod}-manifest", data)
+data[:sha1] = Digest::SHA1.file(data[:file]).hexdigest
+data[:md5] = Digest::MD5.file(data[:file]).hexdigest
+data[:name] = "#{mod}-manifest"
+maybeUploadArtifactoryObject(data: data, artifact_module: data[:name], artifact_version: data[:version] || @vars[:version], file_name: '', file_ext: 'properties') # -#{@vars[:variant]}
+end
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def maybeUploadArtifactoryObject(args)
+super
+if @vars[:return_code] == 0
+data = args[:data]
+artifact_module = args[:artifact_module]
+artifact_version = args[:artifact_version]
+# file_ext = args[:file_ext]
+# file_name = args[:file_name]
+if @manifest[artifact_module].nil?
+@manifest[artifact_module] = {}
+file_name = artifact_module
+else
+file_name, _ = get_artifact_file_name_ext(data)
+if file_name.empty?
+file_name = artifact_module
+else
+file_name = "#{artifact_module}#{file_name}"
+end
+end
+@manifest[artifact_module][file_name] = artifact_version
+end
+
+@vars[:return_code]
+end
+
+private :createProductManifests, :createProjectsFile, :createSuperManifest
+end
+end
+end
+end
+end
+
+=begin
+
+{
+"test-project": {
+"name": "test-project",
+"module": "test-server",
+"ext": "war",
+"class_filter": "role.role-1"
+},
+"test-project-2": {
+"name": "test-project-2",
+"module": "test-server2",
+"ext": "zip",
+"class_filter": "role.role-2"
+},
+"test-manifest": {
+"name": "test-manifest",
+"module": "test-manifest",
+"ext": "properties",
+"class_filter": ""
+},
+"test-manifest2": {
+"name": "test-manifest2",
+"module": "test-manifest2",
+"ext": "properties",
+"class_filter": ""
+}
+}
+
+=end
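The Artifactory repo subclass above records everything it uploads in @manifest (keyed by artifact module, then file name, with the artifact version as the value) and then serializes that hash into flat key=value payloads uploaded with a .properties extension. A small sketch of that serialization with hypothetical @manifest contents (the module and file names are made up; the "#{k}=#{v}" formatting is the part taken from createProductManifests/createSuperManifest):

# Hypothetical result of maybeUploadArtifactoryObject's bookkeeping for one build:
manifest = {
  'test-server'  => { 'test-server-1.2.3.war'  => '1.2.3-45' },
  'test-server2' => { 'test-server2-1.2.3.zip' => '1.2.3-45' },
}

# Per-product payload, as assembled by createProductManifests and uploaded as "<module>-manifest":
manifest.each do |mod, man|
  manifest_data = ''
  man.each { |k, v| manifest_data += "#{k}=#{v}\n" }
  puts "#{mod}-manifest => #{manifest_data.inspect}"
end

createSuperManifest concatenates the same pairs across every module into one payload stored under artifactory_manifest_name, and createProjectsFile (when PROJECTS_FILE is set) derives the JSON shown in the =begin block from the same @manifest plus the artifact file extensions.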
data/lib/cicd/builder/manifest/mixlib/repo.rb
ADDED
@@ -0,0 +1,44 @@
+require 'json'
+
+module CiCd
+module Builder
+# noinspection RubySuperCallWithoutSuperclassInspection
+module Manifest
+module Repo
+require 'cicd/builder/mixlib/repo/base'
+require 'cicd/builder/mixlib/repo/S3'
+# noinspection RubyResolve
+if ENV.has_key?('REPO_TYPE') and (not ENV['REPO_TYPE'].capitalize.eql?('S3'))
+require "cicd/builder/manifest/mixlib/repo/#{ENV['REPO_TYPE'].downcase}"
+end
+
+# ---------------------------------------------------------------------------------------------------------------
+def getRepoClass(type = nil)
+@logger.info __method__.to_s
+if type.nil?
+type ||= 'S3'
+if ENV.has_key?('REPO_TYPE')
+type = ENV['REPO_TYPE']
+end
+end
+
+@logger.info "#{type} repo interface"
+clazz = begin
+Object.const_get("#{self.class.name.gsub(%r'::\w+$', '')}::Repo::#{type}")
+rescue NameError #=> e
+Object.const_get("CiCd::Builder::Repo::#{type}")
+end
+
+if block_given?
+if clazz.is_a?(Class) and not clazz.nil?
+yield
+end
+end
+
+clazz
+end
+
+end
+end
+end
+end
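getRepoClass above first looks for a repo class in the including builder's own namespace (self.class.name with the trailing ::Constant stripped, i.e. CiCd::Builder::Manifest for this gem) and only then falls back to the generic classes from cicd-builder. A sketch of that lookup with the namespaces spelled out (a simplification assuming the method is called without an argument, so REPO_TYPE decides the type and 'S3' is the default):

type = ENV['REPO_TYPE'] || 'S3'
clazz = begin
  # Prefer a Manifest-specific override, e.g. CiCd::Builder::Manifest::Repo::Artifactory
  # from the new repo/artifactory.rb above ...
  Object.const_get("CiCd::Builder::Manifest::Repo::#{type}")
rescue NameError
  # ... otherwise use the class shipped by cicd-builder (CiCd::Builder::Repo::S3, etc.).
  Object.const_get("CiCd::Builder::Repo::#{type}")
end

Note that packageBuild in build.rb calls getRepoClass('S3') with an explicit type, so no Manifest-specific S3 class exists to be found and the fallback branch would supply CiCd::Builder::Repo::S3.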
data/lib/cicd/builder/manifest.rb
CHANGED
@@ -10,8 +10,11 @@ module CiCd
 module Manifest
 class Runner < ChefRepo::Runner
 require 'cicd/builder/manifest/mixlib/build'
+require 'cicd/builder/manifest/mixlib/repo'
 include CiCd::Builder::Manifest::Build
+include CiCd::Builder::Manifest::Repo

+# ---------------------------------------------------------------------------------------------------------------
 def initialize()
 super
 @default_options[:builder] = VERSION
@@ -26,12 +29,13 @@ module CiCd
 patch: PATCH,
 }
 end
+
 # ---------------------------------------------------------------------------------------------------------------
 def setup()
 $stdout.write("ManifestBuilder v#{CiCd::Builder::Manifest::VERSION}\n")
 @default_options[:env_keys] << %w(
-
-
+MANIFEST_FILE
+)
 super
 end

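With this change the Runner mixes in the new Repo module and adds MANIFEST_FILE to @default_options[:env_keys]. For reference, a sketch of the environment the code paths in this diff consult (the values are placeholders; only MANIFEST_FILE is added to env_keys here, the rest are read directly where they appear):

ENV['MANIFEST_FILE'] = '/path/to/manifest.yaml'   # parsed by prepareBuild
ENV['WORKSPACE']     = Dir.pwd                    # compared against Dir.pwd in packageBuild
ENV['AWS_S3_BUCKET'] = 'example-artifact-bucket'  # default bucket name in getBucket
ENV['REPO_TYPE']     = 'Artifactory'              # optional; consulted by getRepoClass when no type is passed
ENV['PROJECTS_FILE'] = '/path/to/projects.json'   # optional; triggers createProjectsFile
ENV['PROJECT_NAMES'] = '/path/to/names.json'      # optional; display names used by createProjectsFile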
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: manifest-builder
 version: !ruby/object:Gem::Version
-version: 0.4.0
+version: 0.5.0
 platform: ruby
 authors:
 - Christo De Lange
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-03-
+date: 2015-03-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: cicd-builder
@@ -229,6 +229,8 @@ files:
 - features/step_definitions/manifestrepo-builder_steps.rb
 - lib/cicd/builder/manifest.rb
 - lib/cicd/builder/manifest/mixlib/build.rb
+- lib/cicd/builder/manifest/mixlib/repo.rb
+- lib/cicd/builder/manifest/mixlib/repo/artifactory.rb
 - lib/cicd/builder/manifest/version.rb
 - manifest-builder.gemspec
 - spec/builder_spec.rb