cicd-builder 0.9.23 → 0.9.25

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -0,0 +1,355 @@
+ require 'aws-sdk-core'
+ require 'aws-sdk-resources'
+
+ module CiCd
+   module Builder
+     module Repo
+       class S3 < CiCd::Builder::Repo::Base
+
+         # ---------------------------------------------------------------------------------------------------------------
+         def initialize(builder)
+           raise "Missing variable AWS_S3_BUCKET" unless ENV.has_key?('AWS_S3_BUCKET')
+           super(builder)
+         end
+
+         # ---------------------------------------------------------------------------------------------------------------
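+         # Lazily build and memoize the S3 client, then verify that the SDK resolved usable credentials,
+         # falling back to the profile named in AWS_PROFILE before giving up.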
+         def getS3()
+           region = ENV['AWS_REGION'] || ::Aws.config[:region] || 'us-east-1'
+           unless @s3
+             @s3 = ::Aws::S3::Client.new(region: region)
+           end
+           unless @s3 and ((@s3.config.access_key_id and @s3.config.secret_access_key) or @s3.config.credentials)
+             @logger.warn "Unable to find AWS credentials in standard locations:
+   ENV['AWS_ACCESS_KEY_ID'] and ENV['AWS_SECRET_ACCESS_KEY']
+   Aws.config[:credentials]
+   Shared credentials file, ~/.aws/credentials
+   EC2 Instance profile
+ "
+             if ENV['AWS_PROFILE']
+               @logger.info "Trying profile '#{ENV['AWS_PROFILE']}' explicitly"
+               creds = Aws::SharedCredentials.new(path: File.expand_path('~/.aws/credentials'), profile: ENV['AWS_PROFILE'])
+               if creds.loadable?
+                 @s3 = ::Aws::S3::Client.new(region: region, credentials: creds)
+               end
+             else
+               @logger.warn 'No AWS_PROFILE defined'
+             end
+           end
+           unless @s3 and ((@s3.config.access_key_id and @s3.config.secret_access_key) or @s3.config.credentials)
+             raise 'Unable to find AWS credentials!'
+           end
+           @s3
+         end
+
+         # ---------------------------------------------------------------------------------------------------------------
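+         # Upload each artifact unless an identical copy is already in the bucket: compare against the S3 ETag
+         # (a plain MD5 only for single-part uploads, hence calcLocalETag) and the checksum stored in object metadata.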
+         def uploadToRepo(artifacts)
+           s3 = getS3()
+           artifacts.each{|art|
+             s3_obj = maybeS3Object(art[:key], s3)
+             upload = false
+             md5 = nil
+             if art[:data][:data]
+               # md5 = Digest::MD5.hexdigest(art[:data][:data])
+               tempArtifactFile('artifact', art[:data])
+             end
+             if s3_obj.nil?
+               upload = true
+             else
+               @logger.info "s3://#{ENV['AWS_S3_BUCKET']}/#{art[:key]} exists"
+               etag = s3_obj.etag.gsub(/"/, '')
+               md5 = if art[:data].has_key?(:file)
+                       # md5 = Digest::MD5.file(art[:data][:file]).hexdigest
+                       calcLocalETag(etag, art[:data][:file])
+                     else
+                       raise "Internal error: No :file in #{art[:data].ai}"
+                     end
+               unless etag == md5
+                 checksum = s3_obj.metadata['checksum']
+                 unless checksum and checksum == md5
+                   @logger.warn "s3://#{ENV['AWS_S3_BUCKET']}/#{art[:key]} is different from our #{art[:key]} (#{s3_obj.etag} <=> #{md5})"
+                   upload = true
+                 end
+               end
+             end
+
+             if upload
+               @logger.info "Upload new s3://#{ENV['AWS_S3_BUCKET']}/#{art[:key]}"
+               # Get size before upload changes our object
+               body = nil
+               if art[:data].has_key?(:file)
+                 size = File.size(art[:data][:file])
+                 body = File.open(art[:data][:file], 'r')
+               else
+                 # size = art[:data][:data].length
+                 # body = art[:data][:data]
+                 raise "Internal error: No :file in #{art[:data].ai}"
+               end
+               art[:data][:metadata] = {checksum: md5, digest: "md5=#{md5}"}
+               # art[:data][:'x-amz-meta-digest'] = "md5=#{md5}"
+               res = s3.put_object(bucket: ENV['AWS_S3_BUCKET'],
+                                   key: art[:key],
+                                   body: body,
+                                   # acl: 'authenticated-read',
+                                   content_length: size,
+                                   metadata: art[:data][:metadata],
+                                  )
+               s3_obj = maybeS3Object(art[:key], s3)
+               raise "Failed to upload '#{art[:key]}'" unless s3_obj
+               if art.has_key?(:public_url)
+                 @vars[art[:public_url]] = s3_obj.public_url
+               end
+               if art.has_key?(:read_url)
+                 @vars[art[:read_url]] = s3_obj.presigned_url(:get, expires_in: 86400)
+                 @logger.info "#{art[:label]}: #{@vars[art[:read_url]]}"
+               end
+             end
+             if art[:data][:temp]
+               File.unlink(art[:data][:file])
+             end
+           }
+           0
+         end
+
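+         # Return an Aws::S3::Object for key, or nil if it does not exist (the etag call forces a HEAD request).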
+         def maybeS3Object(key, s3 = nil)
+           s3 ||= getS3()
+           s3_obj = begin
+                      obj = ::Aws::S3::Object.new(bucket_name: ENV['AWS_S3_BUCKET'], key: key, client: s3)
+                      obj.etag
+                      obj
+                    rescue Aws::S3::Errors::NotFound
+                      nil
+                    rescue Aws::S3::Errors::NoSuchKey
+                      nil
+                    end
+           s3_obj
+         end
+
+         # ---------------------------------------------------------------------------------------------------------------
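+         # Maintain a per-project INVENTORY.json in the bucket: create it when absent or stale,
+         # otherwise record this build under its variant and refresh the 'latest' indices.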
+         def takeInventory()
+           def _update(hash, key, value)
+             h = {}
+             i = -1
+             hash[key].each { |v| h[v] = i+=1 }
+             unless h.has_key?(value)
+               h[value] = h.keys.size # No -1 because this is evaluated BEFORE we make the addition!
+             end
+             s = h.sort_by { |_, v| v }
+             s = s.map { |v| v[0] }
+             hash[key] = s
+             h[value]
+           end
+
+           # Read in and parse the JSON
+           json_s = ''
+           json = nil
+           varianth = nil
+
+           key = "#{@vars[:project_name]}/INVENTORY.json"
+           s3_obj = maybeS3Object(key)
+           # If the inventory has been started, add to it; else create a new one
+           if s3_obj.nil?
+             # Start a new inventory
+             over = true
+           else
+             resp = s3_obj.get()
+             body = resp.body
+             if body.is_a?(String)
+               json_s = body
+             else
+               body.rewind
+               json_s = body.read()
+             end
+             json = Yajl::Parser.parse(json_s)
+             over = false
+             # Is the inventory format up to date ...
+             constraint = ::Semverse::Constraint.new "<= #{@options[:gen]}"
+             version = ::Semverse::Version.new(json['gen'])
+             # raise CiCd::Builder::Errors::InvalidVersion.new "The constraint failed: #{json['gen']} #{constraint}"
+
+             unless constraint.satisfies?(version)
+               raise CiCd::Builder::Errors::InvalidVersion.new "The inventory generation is newer than I can manage: #{version} <=> #{@options[:gen]}"
+             end
+             if json['container'] and json['container']['variants']
+               # ... but if it does not have our variant then add it
+               variants = json['container']['variants']
+               unless variants[@vars[:variant]]
+                 variants[@vars[:variant]] = {}
+                 varianth = variants[@vars[:variant]]
+                 varianth['builds'] = []
+                 varianth['branches'] = []
+                 varianth['versions'] = []
+                 varianth['releases'] = []
+                 varianth['latest'] = {
+                   branch: -1,
+                   version: -1,
+                   build: -1,
+                   release: -1,
+                 }
+               end
+               varianth = variants[@vars[:variant]]
+               # If the inventory 'latest' format is up to date ...
+               unless varianth['latest'] and
+                   varianth['latest'].is_a?(Hash)
+                 # Start over ... too old/incompatible
+                 over = true
+               end
+             else
+               # Start over ... too old/incompatible
+               over = true
+             end
+           end
+           # Starting fresh?
+           if over or json.nil?
+             json_s = initInventory()
+           else
+             raise CiCd::Builder::Errors::Internal.new sprintf('Internal logic error! %s::%d', __FILE__, __LINE__) if varianth.nil?
+             # Add the new build if we don't have it
+             unless varianth['builds'].map { |b| b['build_name'] }.include?(@vars[:build_nmn])
+               #noinspection RubyStringKeysInHashInspection
+               filing = {
+                 'drawer' => @vars[:build_nam],
+                 'build_name' => @vars[:build_nmn],
+                 'build_number' => @vars[:build_num],
+                 'release' => @vars[:release],
+               }
+               if @vars.has_key?(:artifacts)
+                 filing['artifacts'] = @vars[:artifacts].map { |artifact| File.basename(artifact[:key]) }
+               end
+               assembly = json['container']['assembly'] or raise("Expected an 'assembly'")
+               if assembly['extension'] != @vars[:build_ext]
+                 # noinspection RubyStringKeysInHashInspection
+                 filing['assembly'] = {
+                   'extension' => @vars[:build_ext],
+                   'type' => 'tarbzip2'
+                 }
+               end
+               varianth['builds'] << filing
+             end
+             build_lst = (varianth['builds'].size-1)
+             build_rel = build_lst
+             i = -1
+             varianth['builds'].each{ |h|
+               i += 1
+               convert_build(h)
+               convert_build(varianth['builds'][build_rel])
+               if h['release'].to_i > varianth['builds'][build_rel]['release'].to_i
+                 build_rel = i
+               elsif h['release'] == varianth['builds'][build_rel]['release']
+                 build_rel = i if h['build_number'].to_i > varianth['builds'][build_rel]['build_number'].to_i
+               end
+             }
+
+             # Add new branch ...
+             build_bra = _update(varianth, 'branches', @vars[:build_bra])
+             # Add new version ...
+             build_ver = _update(varianth, 'versions', @vars[:build_ver])
+
+             # Set latest
+             varianth['latest'] = {
+               branch: build_bra,
+               version: build_ver,
+               build: build_lst,
+               release: build_rel,
+             }
+             json['gen'] = @options[:gen]
+             json_s = JSON.pretty_generate(json, { indent: "\t", space: ' ' })
+           end
+           begin
+             md5 = Digest::MD5.hexdigest(json_s)
+             # [:'x-amz-meta-digest'] = "md5=#{md5}"
+             resp = getS3.put_object(bucket: ENV['AWS_S3_BUCKET'],
+                                     key: key,
+                                     body: json_s,
+                                     # acl: 'authenticated-read',
+                                     metadata: {checksum: md5, digest: "md5=#{md5}"},
+                                    )
+             s3_obj = maybeS3Object(key)
+             # s3_obj.etag
+             @logger.info "Inventory URL: #{s3_obj.presigned_url(:get, expires_in: 86400)}"
+             return 0
+           rescue Exception => e
+             @logger.error("Exception: #{e.class.name}: #{e.message}\n#{e.backtrace.ai}")
+             return Errors::INVENTORY_UPLOAD_EXCEPTION
+           end
+         end
+
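+         # Normalize legacy build entries in place: older inventories used 'number'/'build' keys,
+         # newer ones use 'build_number'/'build_name'.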
+         def convert_build(h)
+           if h.has_key?('number')
+             h['build_number'] = h['number']
+             h.delete 'number'
+           elsif h.has_key?('build_number')
+             h.delete 'number'
+           else
+             h_build = h.has_key?('build') ? h['build'] : h['build_name']
+             h_number = h_build.gsub(/^.*?-build-([0-9]+)$/, '\1').to_i
+
+             h['build_number'] = h_number
+             h['build_name'] = h_build
+             h.delete 'build'
+             h.delete 'number'
+           end
+           if h.has_key?('build')
+             h_build = h['build']
+             h_number = h_build.gsub(/^.*?-build-([0-9]+)$/, '\1').to_i
+
+             h['build_number'] = h_number
+             h['build_name'] = h_build
+             h.delete 'build'
+             h.delete 'number'
+           end
+           h
+         end
+
+         # ---------------------------------------------------------------------------------------------------------------
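+         # Queue the package, manifest, and checksum artifacts, upload them, then record the build in the inventory.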
+         def uploadBuildArtifacts()
+           if @vars.has_key?(:build_dir) and @vars.has_key?(:build_pkg)
+             artifacts = @vars[:artifacts] rescue []
+
+             key = getKey()
+             if File.exist?(@vars[:build_pkg])
+               # Store the assembly - be sure to inherit possible overrides in pkg name and ext but dictate the drawer!
+               artifacts << {
+                 key: "#{File.join(File.dirname(key), File.basename(@vars[:build_pkg]))}",
+                 data: {:file => @vars[:build_pkg]},
+                 public_url: :build_url,
+                 label: 'Package URL'
+               }
+             else
+               @logger.warn "Skipping upload of missing artifact: '#{@vars[:build_pkg]}'"
+             end
+
+             # Store the metadata
+             manifest = manifestMetadata()
+             artifacts << {
+               key: "#{key}.MANIFEST.json",
+               data: {:data => manifest},
+               public_url: :manifest_url,
+               read_url: :manifest_url,
+               label: 'Manifest URL'
+             }
+
+             # Store the checksum
+             artifacts << {
+               key: "#{@vars[:project_name]}/#{@vars[:variant]}/#{@vars[:build_nam]}/#{@vars[:build_nmn]}.checksum",
+               data: {:data => @vars[:build_sha]},
+               public_url: :checksum_url,
+               read_url: :checksum_url,
+               label: 'Checksum URL'
+             }
+
+             @vars[:return_code] = uploadToRepo(artifacts)
+             if 0 == @vars[:return_code]
+               @vars[:return_code] = takeInventory()
+             end
+           else
+             @vars[:return_code] = Errors::NO_ARTIFACTS
+           end
+           @vars[:return_code]
+         end
+
+       end
+     end
+   end
+ end
@@ -0,0 +1,237 @@
+ require 'artifactory'
+
+ module CiCd
+   module Builder
+     module Repo
+       class Artifactory < CiCd::Builder::Repo::Base
+         # include ::Artifactory::Resource
+
+         # ---------------------------------------------------------------------------------------------------------------
+         def initialize(builder)
+           # Check for the necessary environment variables
+           map_keys = {}
+
+           %w[ARTIFACTORY_ENDPOINT ARTIFACTORY_USERNAME ARTIFACTORY_PASSWORD ARTIFACTORY_REPO].each { |k|
+             map_keys[k] = (not ENV.has_key?(k) or ENV[k].empty?)
+           }
+           missing = map_keys.keys.select{ |k| map_keys[k] }
+
+           if missing.count() > 0
+             raise("Need these environment variables: #{missing.ai}")
+           end
+
+           super(builder)
+
+           # ::Artifactory.configure do |config|
+           #   # The endpoint for the Artifactory server. If you are running the "default"
+           #   # Artifactory installation using tomcat, don't forget to include the
+           #   # +/artifactory+ part of the URL.
+           #   config.endpoint = artifactory_endpoint()
+           #
+           #   # The basic authentication information. Since this uses HTTP Basic Auth, it
+           #   # is highly recommended that you run Artifactory over SSL.
+           #   config.username = ENV['ARTIFACTORY_USERNAME']
+           #   config.password = ENV['ARTIFACTORY_PASSWORD']
+           #
+           #   # Speaking of SSL, you can specify the path to a pem file with your custom
+           #   # certificates and the gem will wire it all up for you (NOTE: it must be a
+           #   # valid PEM file).
+           #   # config.ssl_pem_file = '/path/to/my.pem'
+           #
+           #   # Or if you are feeling frisky, you can always disable SSL verification
+           #   # config.ssl_verify = false
+           #
+           #   # You can specify any proxy information, including any authentication
+           #   # information in the URL.
+           #   # config.proxy_username = 'user'
+           #   # config.proxy_password = 'password'
+           #   # config.proxy_address = 'my.proxy.server'
+           #   # config.proxy_port = '8080'
+           # end
+           @client = ::Artifactory::Client.new()
+         end
+
+         # ---------------------------------------------------------------------------------------------------------------
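+         # Resolve artifactory_* helper calls (e.g. artifactory_endpoint, artifactory_repo) from the
+         # corresponding upper-cased environment variable.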
+         def method_missing(name, *args)
+           if name =~ %r'^artifactory_'
+             key = name.to_s.upcase
+             raise "ENV has no key #{key}" unless ENV.has_key?(key)
+             ENV[key]
+           else
+             super
+           end
+         end
+
+         # ---------------------------------------------------------------------------------------------------------------
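+         # Upload each artifact to Artifactory unless an existing copy already matches by MD5 and SHA-1,
+         # then publish a name=path manifest as a .properties artifact of its own.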
+         def uploadToRepo(artifacts)
+           # Set a few build properties on the endpoint URL
+           @properties_matrix = {
+             :'build.name' => @vars[:build_mdd][:Project],
+             :'build.number' => @vars[:build_mdd][:Build],
+             :'vcs.revision' => @vars[:build_mdd][:Commit]
+           }
+           @vars[:build_mdd].each do |k, v|
+             @properties_matrix["build.#{k.downcase}"] = v
+           end
+           # matrix = properties.map{|k,v| (v.nil? or v.empty?) ? nil : "#{k}=#{v}"}.join("\;").gsub(%r'^\;*(.*?)\;*$', '\1')
+           # @client.endpoint += ";#{matrix}"
+           manifest = {}
+           artifacts.each{|art|
+             data = art[:data]
+             objects = maybeArtifactoryObject(data)
+             upload = false
+             if data.has_key?(:data)
+               tempArtifactFile("manifest-#{data[:name]}", data)
+             end
+             if data.has_key?(:file)
+               sha1 = Digest::SHA1.file(data[:file]).hexdigest
+               md5 = Digest::MD5.file(data[:file]).hexdigest
+             else
+               raise 'Artifact does not have file or data?'
+             end
+             file_name = File.basename(data[:file])
+             if file_name =~ %r'^#{data[:name]}'
+               file_name.gsub!(%r'^#{data[:name]}\.*', '')
+             end
+             file_name.gsub!(%r'\.*-*#{data[:version]}', '')
+             file_name.gsub!(%r'\.*-*#{data[:build]}-*', '')
+             file_ext = file_name.dup
+             file_ext.gsub!(%r'^.*?\.*(tar\.gz|tgz|tar\.bzip2|bzip2|tar\.bz2|bz2|jar|war|groovy)$', '\1')
+             unless file_ext.empty?
+               file_name.gsub!(%r'\.*#{file_ext}$', '')
+             end
+             if file_name =~ %r'\.+'
+               raise "Unable to parse out file name in #{data[:file]}"
+             end
+             unless file_name.empty? or file_name.match(%r'^-')
+               file_name = "-#{file_name}"
+             end
+             artifact_name = "#{data[:name]}-#{data[:version]}#{file_name}-#{data[:build]}.#{file_ext}" # -#{@vars[:variant]}
+             artifact_path = "#{artifactory_org_path()}/#{data[:name]}/#{data[:version]}-#{@vars[:variant]}/#{artifact_name}"
+             manifest[data[:name]] = artifact_path
+             if objects.nil? or objects.size == 0
+               upload = true
+             else
+               @logger.info "#{artifactory_endpoint()}/#{artifactory_repo()}/#{artifact_path} exists - #{objects.size} results"
+               # Check the checksum of the artifact
+               matched = false
+               objects.each do |artifact|
+                 @logger.debug "\tChecking: #{artifact.attributes.ai} for #{artifact_path}"
+                 if artifact.uri.match(%r'#{artifact_path}$')
+                   matched = true
+                   @logger.info "\tMatched: #{artifact.attributes.select{|k,_| k != :client}.ai}"
+                   if artifact.md5 != md5 or artifact.sha1 != sha1
+                     upload = true
+                   end
+                 end
+               end
+               upload ||= (not matched)
+             end
+
+             if upload
+               data[:properties] = @properties_matrix
+               uploadArtifact(artifact_path, data, md5, sha1)
+             else
+               @logger.info "Keep existing #{artifactory_endpoint()}/#{artifact_path}"
+             end
+             if data[:temp]
+               File.unlink(data[:file])
+             end
+           }
+           manifest_data = ''
+           manifest.each do |k, v|
+             manifest_data += "#{k}=#{v}\n"
+           end
+           data = { data: manifest_data, version: @vars[:build_ver], build: @vars[:build_num], properties: @properties_matrix }
+           tempArtifactFile('manifest', data)
+           sha1 = Digest::SHA1.file(data[:file]).hexdigest
+           md5 = Digest::MD5.file(data[:file]).hexdigest
+           artifact_name = "#{artifactory_manifest_name}-#{data[:version]}-#{data[:build]}.properties"
+           artifact_path = "#{artifactory_org_path()}/#{artifactory_manifest_module}/#{data[:version]}-#{@vars[:variant]}/#{artifact_name}"
+           uploadArtifact(artifact_path, data, md5, sha1)
+           if data[:temp]
+             File.unlink(data[:file])
+           end
+           0
+         end
+
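+         # Deploy the file with its checksums and properties, upload the checksum records alongside it,
+         # and verify the upload by searching for the artifact afterwards.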
+         def uploadArtifact(artifact_path, data, md5, sha1)
+           data[:size] = File.size(data[:file])
+           @logger.info "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S %z')}] Start upload #{artifact_path} = #{data[:size]} bytes"
+           artifact = ::Artifactory::Resource::Artifact.new(local_path: data[:file], client: @client)
+           # noinspection RubyStringKeysInHashInspection
+           artifact.checksums = {
+             'md5' => md5,
+             'sha1' => sha1
+           }
+           artifact.size = data[:size]
+           result = artifact.upload(artifactory_repo(), "#{artifact_path}", data[:properties] || {})
+           @logger.info "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S %z')}] Uploaded: #{result.attributes.select { |k, _| k != :client }.ai}"
+           artifact.upload_checksum(artifactory_repo(), "#{artifact_path}", :sha1, sha1)
+           artifact.upload_checksum(artifactory_repo(), "#{artifact_path}", :md5, md5)
+           objects = maybeArtifactoryObject(data, false)
+           raise "Failed to upload '#{artifact_path}'" unless objects.size > 0
+         end
+
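+         # Search for existing copies of the artifact: a GAVC search first, falling back to a plain
+         # name search when wide is true.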
+         def maybeArtifactoryObject(data, wide = true)
+           # Get a list of matching artifacts in this repository
+           result = @client.artifact_gavc_search(group: artifactory_org_path(), name: data[:name], version: "#{data[:version]}-#{@vars[:variant]}", repos: [artifactory_repo()])
+           if result.size > 0
+             @logger.info "Artifactory gavc_search match g=#{artifactory_org_path()},a=#{data[:name]},v=#{data[:version]}-#{@vars[:variant]},r=#{artifactory_repo()}: #{result}"
+             # raise "GAVC started working: #{result.ai}"
+           elsif wide
+             @logger.warn 'GAVC search came up empty!'
+             result = @client.artifact_search(name: data[:name], repos: [artifactory_repo()])
+             @logger.info "Artifactory search match a=#{data[:name]},r=#{artifactory_repo()}: #{result}"
+           end
+           result
+         rescue Exception => e
+           @logger.error "Artifactory error: #{e.class.name} #{e.message}"
+           raise e
+         end
+
+         # ---------------------------------------------------------------------------------------------------------------
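+         # Queue the package for upload and hand off to uploadToRepo; on failure record
+         # ARTIFACT_UPLOAD_EXCEPTION before re-raising.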
+         def uploadBuildArtifacts()
+           if @vars.has_key?(:build_dir) and @vars.has_key?(:build_pkg)
+             begin
+               artifacts = @vars[:artifacts] rescue []
+
+               key = getKey()
+               if File.exist?(@vars[:build_pkg])
+                 # Store the assembly - be sure to inherit possible overrides in pkg name and ext but dictate the drawer!
+                 artifacts << {
+                   key: "#{File.join(File.dirname(key), File.basename(@vars[:build_pkg]))}",
+                   data: {:file => @vars[:build_pkg]},
+                   public_url: :build_url,
+                   label: 'Package URL'
+                 }
+               else
+                 @logger.warn "Skipping upload of missing artifact: '#{@vars[:build_pkg]}'"
+               end
+
+               # Store the metadata
+               manifest = manifestMetadata()
+               hash = JSON.parse(manifest)
+
+               @vars[:return_code] = uploadToRepo(artifacts)
+               # if 0 == @vars[:return_code]
+               #   @vars[:return_code] = takeInventory()
+               # end
+               @vars[:return_code]
+             rescue => e
+               @logger.error "#{e.class.name} #{e.message}"
+               @vars[:return_code] = Errors::ARTIFACT_UPLOAD_EXCEPTION
+               raise e
+             end
+           else
+             @vars[:return_code] = Errors::NO_ARTIFACTS
+           end
+           @vars[:return_code]
+         end
+
+       end
+     end
+   end
+ end