dpl-connect 1.8.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.coveralls.yml +1 -0
- data/.gitignore +8 -0
- data/.rspec +2 -0
- data/.travis.yml +36 -0
- data/Gemfile +100 -0
- data/LICENSE +22 -0
- data/README.md +934 -0
- data/Rakefile +1 -0
- data/TESTING.md +29 -0
- data/bin/dpl +5 -0
- data/dpl.gemspec +32 -0
- data/lib/dpl/cli.rb +66 -0
- data/lib/dpl/error.rb +3 -0
- data/lib/dpl/provider.rb +264 -0
- data/lib/dpl/provider/anynines.rb +13 -0
- data/lib/dpl/provider/appfog.rb +21 -0
- data/lib/dpl/provider/atlas.rb +108 -0
- data/lib/dpl/provider/azure_webapps.rb +48 -0
- data/lib/dpl/provider/bintray.rb +509 -0
- data/lib/dpl/provider/bitballoon.rb +22 -0
- data/lib/dpl/provider/bluemix_cloud_foundry.rb +23 -0
- data/lib/dpl/provider/boxfuse.rb +57 -0
- data/lib/dpl/provider/catalyze.rb +49 -0
- data/lib/dpl/provider/chef_supermarket.rb +85 -0
- data/lib/dpl/provider/cloud66.rb +38 -0
- data/lib/dpl/provider/cloud_files.rb +38 -0
- data/lib/dpl/provider/cloud_foundry.rb +43 -0
- data/lib/dpl/provider/code_deploy.rb +123 -0
- data/lib/dpl/provider/deis.rb +119 -0
- data/lib/dpl/provider/divshot.rb +23 -0
- data/lib/dpl/provider/elastic_beanstalk.rb +195 -0
- data/lib/dpl/provider/engine_yard.rb +90 -0
- data/lib/dpl/provider/firebase.rb +27 -0
- data/lib/dpl/provider/gae.rb +97 -0
- data/lib/dpl/provider/gcs.rb +59 -0
- data/lib/dpl/provider/hackage.rb +29 -0
- data/lib/dpl/provider/heroku.rb +18 -0
- data/lib/dpl/provider/heroku/api.rb +98 -0
- data/lib/dpl/provider/heroku/generic.rb +94 -0
- data/lib/dpl/provider/heroku/git.rb +28 -0
- data/lib/dpl/provider/lambda.rb +236 -0
- data/lib/dpl/provider/launchpad.rb +48 -0
- data/lib/dpl/provider/modulus.rb +23 -0
- data/lib/dpl/provider/npm.rb +64 -0
- data/lib/dpl/provider/openshift.rb +59 -0
- data/lib/dpl/provider/ops_works.rb +132 -0
- data/lib/dpl/provider/packagecloud.rb +144 -0
- data/lib/dpl/provider/pages.rb +79 -0
- data/lib/dpl/provider/puppet_forge.rb +43 -0
- data/lib/dpl/provider/pypi.rb +111 -0
- data/lib/dpl/provider/releases.rb +139 -0
- data/lib/dpl/provider/rubygems.rb +51 -0
- data/lib/dpl/provider/s3.rb +123 -0
- data/lib/dpl/provider/scalingo.rb +97 -0
- data/lib/dpl/provider/script.rb +29 -0
- data/lib/dpl/provider/surge.rb +33 -0
- data/lib/dpl/provider/testfairy.rb +190 -0
- data/lib/dpl/provider/transifex.rb +45 -0
- data/lib/dpl/version.rb +3 -0
- data/notes/engine_yard.md +1 -0
- data/notes/heroku.md +3 -0
- data/spec/cli_spec.rb +36 -0
- data/spec/provider/anynines_spec.rb +20 -0
- data/spec/provider/appfog_spec.rb +35 -0
- data/spec/provider/atlas_spec.rb +99 -0
- data/spec/provider/azure_webapps_spec.rb +95 -0
- data/spec/provider/bintray_spec.rb +259 -0
- data/spec/provider/bitballoon_spec.rb +32 -0
- data/spec/provider/bluemixcloudfoundry_spec.rb +23 -0
- data/spec/provider/boxfuse_spec.rb +16 -0
- data/spec/provider/catalyze_spec.rb +39 -0
- data/spec/provider/chef_supermarket_spec.rb +51 -0
- data/spec/provider/cloud66_spec.rb +44 -0
- data/spec/provider/cloud_files_spec.rb +88 -0
- data/spec/provider/cloudfoundry_spec.rb +71 -0
- data/spec/provider/code_deploy_spec.rb +360 -0
- data/spec/provider/deis_spec.rb +116 -0
- data/spec/provider/divshot_spec.rb +28 -0
- data/spec/provider/elastic_beanstalk_spec.rb +209 -0
- data/spec/provider/firebase_spec.rb +40 -0
- data/spec/provider/gae_spec.rb +26 -0
- data/spec/provider/gcs_spec.rb +115 -0
- data/spec/provider/hackage_spec.rb +47 -0
- data/spec/provider/heroku_spec.rb +357 -0
- data/spec/provider/lambda_spec.rb +432 -0
- data/spec/provider/launchpad_spec.rb +33 -0
- data/spec/provider/modulus_spec.rb +29 -0
- data/spec/provider/npm_spec.rb +95 -0
- data/spec/provider/openshift_spec.rb +91 -0
- data/spec/provider/ops_works_spec.rb +127 -0
- data/spec/provider/packagecloud_spec.rb +56 -0
- data/spec/provider/puppet_forge_spec.rb +60 -0
- data/spec/provider/pypi_spec.rb +103 -0
- data/spec/provider/releases_spec.rb +303 -0
- data/spec/provider/rubygems_spec.rb +106 -0
- data/spec/provider/s3_spec.rb +174 -0
- data/spec/provider/scalingo_spec.rb +64 -0
- data/spec/provider/script_spec.rb +26 -0
- data/spec/provider/surge_spec.rb +15 -0
- data/spec/provider/testfairy_spec.rb +86 -0
- data/spec/provider/transifex_spec.rb +110 -0
- data/spec/provider_spec.rb +210 -0
- data/spec/spec_helper.rb +20 -0
- metadata +279 -0
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
module DPL
  class Provider
    # Deploys to an Azure Web App (or deployment slot) by pushing the
    # current git HEAD to the app's Kudu SCM git endpoint.
    class AzureWebApps < Provider
      # Deployment settings, resolved from CLI options with environment
      # variables as fallback.
      #
      # @return [Hash] keys: "username", "password", "site", "slot"
      def config
        {
          "username" => options[:username] || context.env['AZURE_WA_USERNAME'],
          "password" => options[:password] || context.env['AZURE_WA_PASSWORD'],
          "site" => options[:site] || context.env['AZURE_WA_SITE'],
          "slot" => options[:slot] || context.env['AZURE_WA_SLOT']
        }
      end

      # Remote URL of the Kudu SCM endpoint for the slot (or, absent a
      # slot, the site itself). NOTE: the URL embeds the credentials, so
      # it must never be logged.
      def git_target
        "https://#{config['username']}:#{config['password']}@#{config['slot'] || config['site']}.scm.azurewebsites.net:443/#{config['site']}.git"
      end

      # Deployment is credential-based; no SSH key is needed.
      def needs_key?
        false
      end

      # No pre-deploy application check is required for Azure.
      def check_app
      end

      # Fails fast when any mandatory credential/name is missing.
      def check_auth
        error "missing Azure Git Deployment username" unless config['username']
        error "missing Azure Git Deployment password" unless config['password']
        error "missing Azure Web App name" unless config['site']
      end

      # Pushes HEAD to the Azure git remote. With :skip_cleanup, first
      # commits the (possibly dirty) working tree so build artifacts are
      # included in the push. Output is suppressed unless :verbose is set,
      # because the push command line contains the credentials.
      def push_app
        log "Deploying to Azure Web App '#{config['slot'] || config['site']}'"

        if options[:skip_cleanup]
          log "Skipping Cleanup"
          context.shell "git checkout HEAD"
          context.shell "git add . --all --force"
          context.shell "git commit -m \"Skip Cleanup Commit\""
        end

        if options[:verbose]
          context.shell "git push --force --quiet #{git_target} HEAD:refs/heads/master"
        else
          context.shell "git push --force --quiet #{git_target} HEAD:refs/heads/master > /dev/null 2>&1"
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,509 @@
|
|
|
1
|
+
require 'json'
require 'net/http'
require "uri"
require 'find'

module DPL
  class Provider
    # Deploys build artifacts to Bintray, driven by a JSON "descriptor"
    # file: creates the package and version if missing, uploads files
    # matched by include/exclude patterns, optionally GPG-signs and
    # publishes the version.
    class Bintray < Provider
      # Credentials are validated by the Bintray API on first use.
      def check_auth
      end

      # Authentication is user/key based; no SSH key is needed.
      def needs_key?
        false
      end

      # When true, skips filesystem existence checks and the
      # attribute-setting follow-up requests (used by the test suite).
      attr_accessor :test_mode
      attr_reader :user
      attr_reader :key
      attr_reader :file
      attr_reader :passphrase
      attr_reader :url
      attr_reader :dry_run
      attr_reader :descriptor

      # Reads required options (:user, :key, :file) and optional ones
      # (:url, :passphrase, :dry_run), aborting when a required one is
      # missing.
      def initialize(*args)
        super(*args)
        @test_mode = false
        @user = options[:user]
        @key = options[:key]
        @url = options[:url]
        @file = options[:file]
        @passphrase = options[:passphrase]
        @dry_run = options[:dry_run]

        if @user.nil?
          abort("The 'user' argument is required")
        end
        if @key.nil?
          abort("The 'key' argument is required")
        end
        if @file.nil?
          abort("The 'file' argument is required")
        end
        if @url.nil?
          @url = 'https://api.bintray.com'
        end
        if @dry_run.nil?
          @dry_run = false
        end
      end

      # Loads and parses the JSON descriptor from disk.
      def read_descriptor
        log "Reading descriptor file: #{file}"
        @descriptor = JSON.parse(File.read(file))
      end

      # Sets the descriptor directly from a JSON string (testing hook).
      def descriptor=(json)
        @descriptor = JSON.parse(json)
      end

      # Issues an authenticated HTTPS HEAD request against the API.
      #
      # @return [Net::HTTPResponse]
      def head_request(path)
        url = URI.parse(self.url)
        req = Net::HTTP::Head.new(path)
        req.basic_auth user, key

        sock = Net::HTTP.new(url.host, url.port)
        sock.use_ssl = true
        res = sock.start {|http| http.request(req) }

        return res
      end

      # Issues an authenticated HTTPS POST with an optional JSON body.
      #
      # @return [Net::HTTPResponse]
      def post_request(path, body)
        req = Net::HTTP::Post.new(path)
        req.add_field('Content-Type', 'application/json')
        req.basic_auth user, key
        if !body.nil?
          req.body = body.to_json
        end

        url = URI.parse(self.url)
        sock = Net::HTTP.new(url.host, url.port)
        sock.use_ssl = true
        res = sock.start {|http| http.request(req) }
        return res
      end

      # PUTs the file at +local_file_path+ to +upload_path+, appending
      # Bintray matrix params (";key=value") to the request path.
      #
      # @return [Net::HTTPResponse]
      def put_file_request(local_file_path, upload_path, matrix_params)
        url = URI.parse(self.url)

        file = File.open(local_file_path, 'rb')
        data = file.read()
        http = Net::HTTP.new(url.host, url.port)
        http.use_ssl = true

        # Build the full request path without mutating the caller's
        # upload_path argument.
        full_path = upload_path
        if !matrix_params.nil?
          params = ''
          matrix_params.each do |key, val|
            params << ";#{key}=#{val}"
          end
          full_path = upload_path + params
        end

        request = Net::HTTP::Put.new("#{full_path}")
        request.basic_auth user, key
        request.body = data

        return http.request(request)
      end

      # Uploads a single Artifact under the descriptor's
      # subject/repo/package/version content path. No-op in dry-run mode.
      def upload_file(artifact)
        log "Uploading file '#{artifact.local_path}' to #{artifact.upload_path}"

        if dry_run
          return
        end

        package = descriptor["package"]
        version = descriptor["version"]
        package_name = package["name"]
        subject = package["subject"]
        repo = package["repo"]
        version_name = version["name"]

        path = "/content/#{subject}/#{repo}/#{package_name}/#{version_name}/#{artifact.upload_path}"
        res = put_file_request(artifact.local_path, path, artifact.matrix_params)
        log_bintray_response(res)
      end

      # API path used to probe whether the package exists.
      def package_exists_path
        package = descriptor["package"]
        subject = package["subject"]
        name = package["name"]
        repo = package["repo"]
        return "/packages/#{subject}/#{repo}/#{name}"
      end

      # True when the package exists (200/201), false on 404; aborts on
      # any other response code. Dry-run always reports "missing".
      def package_exists?
        path = package_exists_path
        if !dry_run
          res = head_request(path)
          code = res.code.to_i
        else
          code = 404
        end

        if code == 404
          return false
        end
        if code == 201 || code == 200
          return true
        end
        name = descriptor["package"]["name"]
        abort("Unexpected HTTP response code #{code} returned from Bintray while checking if package '#{name}' exists. " +
              "Response message: #{res.message}")
      end

      # API path used to probe whether the version exists.
      def version_exists_path
        package = descriptor["package"]
        version = descriptor["version"]
        package_name = package["name"]
        subject = package["subject"]
        repo = package["repo"]
        version_name = version["name"]

        return "/packages/#{subject}/#{repo}/#{package_name}/versions/#{version_name}"
      end

      # True when the version exists (200/201), false on 404; aborts on
      # any other response code. Dry-run always reports "missing".
      def version_exists?
        path = version_exists_path
        if !dry_run
          res = head_request(path)
          code = res.code.to_i
        else
          code = 404
        end

        if code == 404
          return false
        end
        if code == 201 || code == 200
          return true
        end
        version_name = descriptor["version"]["name"]
        abort("Unexpected HTTP response code #{code} returned from Bintray while checking if version '#{version_name}' exists. " +
              "Response message: #{res.message}")
      end

      # Creates the package from the descriptor's "package" section and,
      # on success, applies its attributes.
      #
      # @return [RequestDetails] the path/body sent (for tests)
      def create_package
        package = descriptor["package"]
        repo = package["repo"]
        body = {}

        add_to_map(body, package, "name")
        add_to_map(body, package, "desc")
        add_to_map(body, package, "licenses")
        add_to_map(body, package, "labels")
        add_to_map(body, package, "vcs_url")
        add_to_map(body, package, "website_url")
        add_to_map(body, package, "issue_tracker_url")
        add_to_map(body, package, "public_download_numbers")
        add_to_map(body, package, "public_stats")

        subject = package["subject"]
        package_name = package["name"]
        log "Creating package '#{package_name}'..."

        path = "/packages/#{subject}/#{repo}"
        if !dry_run
          res = post_request(path, body)
          log_bintray_response(res)
          code = res.code.to_i
        else
          code = 200
        end

        if !test_mode
          if code == 201 || code == 200
            add_package_attributes
          end
        end
        RequestDetails.new(path, body)
      end

      # Posts the package's optional "attributes" list, if present.
      #
      # @return [RequestDetails]
      def add_package_attributes
        package = descriptor["package"]
        repo = package["repo"]
        subject = package["subject"]
        package_name = package["name"]
        attributes = package["attributes"]
        path = nil
        if !attributes.nil?
          log "Adding attributes for package '#{package_name}'..."
          path = "/packages/#{subject}/#{repo}/#{package_name}/attributes"
          if !dry_run
            res = post_request(path, attributes)
            log_bintray_response(res)
          end
        end
        RequestDetails.new(path, attributes)
      end

      # Creates the version from the descriptor's "version" section and,
      # on success, applies its attributes.
      #
      # @return [RequestDetails]
      def create_version
        package = descriptor["package"]
        version = descriptor["version"]
        repo = package["repo"]
        body = {}

        add_to_map(body, version, "name")
        add_to_map(body, version, "desc")
        add_to_map(body, version, "released")
        add_to_map(body, version, "vcs_tag")
        add_to_map(body, version, "github_release_notes_file")
        add_to_map(body, version, "github_use_tag_release_notes")
        add_to_map(body, version, "attributes")

        package_name = package["name"]
        subject = package["subject"]
        version_name = version["name"]
        log "Creating version '#{version_name}'..."

        path = "/packages/#{subject}/#{repo}/#{package_name}/versions"
        if !dry_run
          res = post_request(path, body)
          log_bintray_response(res)
          code = res.code.to_i
        else
          code = 200
        end

        if !test_mode
          if code == 201 || code == 200
            add_version_attributes
          end
        end
        RequestDetails.new(path, body)
      end

      # Posts the version's optional "attributes" list, if present.
      #
      # @return [RequestDetails]
      def add_version_attributes
        package = descriptor["package"]
        package_name = package["name"]
        subject = package["subject"]
        version = descriptor["version"]
        version_name = version["name"]
        repo = package["repo"]
        attributes = version["attributes"]
        path = nil
        if !attributes.nil?
          log "Adding attributes for version '#{version_name}'..."
          path = "/packages/#{subject}/#{repo}/#{package_name}/versions/#{version_name}/attributes"
          if !dry_run
            res = post_request(path, attributes)
            log_bintray_response(res)
          end
        end
        RequestDetails.new(path, attributes)
      end

      # Creates the package only when it does not already exist.
      def check_and_create_package
        if !package_exists?
          create_package
        end
      end

      # Creates the version only when it does not already exist.
      def check_and_create_version
        if !version_exists?
          create_version
        end
      end

      # Uploads every artifact collected by #files_to_upload.
      def upload_files
        files = files_to_upload

        files.each do |key, artifact|
          upload_file(artifact)
        end
      end

      # Publishes the version when the descriptor's "publish" flag is set.
      #
      # @return [RequestDetails]
      def publish_version
        publish = descriptor["publish"]
        if publish
          package = descriptor["package"]
          version = descriptor["version"]
          repo = package["repo"]
          package_name = package["name"]
          subject = package["subject"]
          version_name = version["name"]

          log "Publishing version '#{version_name}' of package '#{package_name}'..."
          path = "/content/#{subject}/#{repo}/#{package_name}/#{version_name}/publish"
          if !dry_run
            res = post_request(path, nil)
            log_bintray_response(res)
          end
        end
        RequestDetails.new(path, nil)
      end

      # GPG-signs the version when the descriptor's version "gpgSign"
      # flag is set, forwarding the passphrase when one was supplied.
      #
      # @return [RequestDetails, nil] nil when signing is not requested
      def gpg_sign_version
        version = descriptor["version"]
        gpg_sign = version["gpgSign"]
        if gpg_sign
          package = descriptor["package"]
          repo = package["repo"]
          package_name = package["name"]
          subject = package["subject"]
          version_name = version["name"]

          body = nil
          # FIX: the two log messages below were previously swapped —
          # the passphrase branch claimed "no passphrase" and vice versa.
          if !passphrase.nil?
            log "Signing version with passphrase..."
            body = {}
            body["passphrase"] = passphrase
          else
            log "Signing version with no passphrase..."
          end

          path = "/gpg/#{subject}/#{repo}/#{package_name}/versions/#{version_name}"
          if !dry_run
            res = post_request(path, body)
            log_bintray_response(res)
          end
          RequestDetails.new(path, body)
        end
      end

      # Get the root path from which to start collecting files to be
      # uploaded to Bintray: the prefix of the include pattern before the
      # first capturing group. Returns nil (with a warning) when the path
      # does not exist, unless running in test mode.
      def root_path(str)
        index = str.index('(')
        path = nil
        if index.nil? || str.start_with?('(')
          path = str
        else
          path = str[0, index]
        end

        if !test_mode && !File.exist?(path)
          log "Warning: Path: #{path} does not exist."
          return nil
        end
        return path
      end

      # Fills a map with Artifact objects which match
      # the include pattern and do not match the exclude pattern.
      # The artifacts are files collected from the file system.
      def fill_files_map(map, include_pattern, exclude_pattern, upload_pattern, matrix_params)
        # Get the root path from which to start collecting the files.
        root_path = root_path(include_pattern)
        if root_path.nil?
          return
        end

        # Start scanning the root path recursively.
        Find.find(root_path) do |path|
          add_if_matches(map, path, include_pattern, exclude_pattern, upload_pattern, matrix_params)
        end
      end

      # Adds +path+ to +map+ as an Artifact when it matches the include
      # pattern (and not the exclude pattern), expanding $1, $2, ... in
      # the upload pattern from the include pattern's capture groups.
      def add_if_matches(map, path, include_pattern, exclude_pattern, upload_pattern, matrix_params)
        res = path.match(/#{include_pattern}/)

        # If the file matches the include pattern and it is not a directory.
        # In case test_mode is set, we do not check if the file exists.
        if !res.nil? && (test_mode || File.file?(path))
          # If the file does not match the exclude pattern.
          if exclude_pattern.nil? || exclude_pattern.empty? || !path.match(/#{exclude_pattern}/)
            # Using the capturing groups in the include pattern, replace the $1, $2, ...
            # in the upload pattern.
            groups = res.captures
            replaced_upload_pattern = upload_pattern
            for i in 0..groups.size-1
              replaced_upload_pattern = replaced_upload_pattern.gsub("$#{i+1}", groups[i])
            end
            map[path] = Artifact.new(path, replaced_upload_pattern, matrix_params)
          end
        end
      end

      # Returns a map containing Artifact objects.
      # The map contains the files to be uploaded to Bintray.
      def files_to_upload
        upload_files = Hash.new()
        files = descriptor["files"]
        if files.nil?
          return upload_files
        end

        files.each { |patterns|
          fill_files_map(
            upload_files,
            patterns["includePattern"],
            patterns["excludePattern"],
            patterns["uploadPattern"],
            patterns["matrixParams"])
        }

        return upload_files
      end

      # Full deployment pipeline: descriptor → package → version →
      # uploads → signing → publishing.
      def deploy
        read_descriptor
        check_and_create_package
        check_and_create_version
        upload_files
        gpg_sign_version
        publish_version
      end

      # Copies a key from one map to another, if the key exists there.
      def add_to_map(to_map, from_map, key)
        if !from_map[key].nil?
          to_map[key] = from_map[key]
        end
      end

      # Logs the response status plus Bintray's JSON "message" field,
      # when the body parses as JSON.
      def log_bintray_response(res)
        msg = ''
        if !res.body.nil?
          begin
            response = JSON.parse(res.body)
            msg = response["message"]
          rescue
          end
        end

        log "Bintray response: #{res.code.to_i} #{res.message}. #{msg}"
      end

      def log(msg)
        puts "[Bintray Upload] #{msg}"
      end

      # This class represents an artifact (file) to be uploaded to Bintray.
      # Identity (hash/eql?) is based solely on the local path, so the
      # same file is uploaded only once per deploy.
      class Artifact
        def initialize(local_path, upload_path, matrix_params)
          @local_path = local_path
          @upload_path = upload_path
          @matrix_params = matrix_params
        end

        # FIX: previously read @localPath, an ivar that is never set,
        # making the hash constant (nil.hash) for all artifacts.
        def hash
          return @local_path.hash
        end

        # FIX: previously compared @localPath (always nil), so two
        # artifacts for the same file never compared equal.
        def eql?(other)
          @local_path == other.local_path
        end

        attr_reader :local_path
        attr_reader :upload_path
        attr_reader :matrix_params
      end

      # Used to return the path and body of REST requests sent to Bintray.
      # Used for testing.
      class RequestDetails
        def initialize(path, body)
          @path = path
          @body = body
        end

        attr_reader :path
        attr_reader :body
      end
    end
  end
end
|