autopub 0.2.2-py3-none-any.whl → 0.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autopub/base copy.py +159 -0
- autopub/base.py +27 -26
- autopub/build_release.py +6 -7
- autopub/check_release.py +6 -2
- autopub/create_github_release.py +5 -5
- autopub/prepare_release.py +19 -14
- autopub/publish_release.py +9 -14
- autopub/vendor/__init__.py +0 -0
- autopub/vendor/github_release.py +1164 -0
- {autopub-0.2.2.dist-info → autopub-0.4.0.dist-info}/METADATA +12 -12
- autopub-0.4.0.dist-info/RECORD +20 -0
- {autopub-0.2.2.dist-info → autopub-0.4.0.dist-info}/WHEEL +1 -1
- autopub-0.2.2.dist-info/RECORD +0 -17
- {autopub-0.2.2.dist-info → autopub-0.4.0.dist-info}/LICENSE +0 -0
- {autopub-0.2.2.dist-info → autopub-0.4.0.dist-info}/entry_points.txt +0 -0
autopub/vendor/github_release.py
@@ -0,0 +1,1164 @@
#!/usr/bin/env python2.7

from __future__ import print_function

from datetime import tzinfo, timedelta, datetime
import fnmatch
import glob
import json
import os
import sys
import tempfile
import time
import types


from functools import wraps
from pprint import pprint

import backoff
import click
import link_header
import requests
from requests import request


REQ_BUFFER_SIZE = 65536  # Chunk size when iterating a download body

_github_token_cli_arg = None
_github_api_url = None


class _UTC(tzinfo):
    """UTC"""

    ZERO = timedelta(0)

    def utcoffset(self, dt):
        return self.ZERO

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return self.ZERO


class _NoopProgressReporter(object):
    reportProgress = False

    def __init__(self, label='', length=0):
        self.label = label
        self.length = length

    def update(self, chunk_size):
        pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        pass


progress_reporter_cls = _NoopProgressReporter
"""The progress reporter class to instantiate. This class
is expected to be a context manager with a constructor accepting `label`
and `length` keyword arguments, an `update` method accepting a `chunk_size`
argument and a class attribute `reportProgress` set to True (It can
conveniently be initialized using `sys.stdout.isatty()`)
"""


def _request(*args, **kwargs):
    with_auth = kwargs.pop("with_auth", True)
    token = _github_token_cli_arg
    if not token:
        token = os.environ.get("GITHUB_TOKEN", None)
    if token and with_auth:
        # Using Bearer token authentication instead of Basic Authentication
        kwargs['headers'] = kwargs.get('headers', {})
        kwargs['headers']['Authorization'] = 'Bearer ' + token
    for _ in range(3):
        response = request(*args, **kwargs)
        is_travis = os.getenv("TRAVIS", None) is not None
        if is_travis and 400 <= response.status_code < 500:
            print("Retrying in 1s (%s Client Error: %s for url: %s)" % (
                response.status_code, response.reason, response.url))
            time.sleep(1)
            continue
        break
    return response


def handle_http_error(func):
    @wraps(func)
    def with_error_handling(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except requests.exceptions.HTTPError as e:
            print('Error sending {0} to {1}'.format(
                e.request.method, e.request.url))
            print('<', e.request.method, e.request.path_url)
            for k in sorted(e.request.headers.keys()):
                print('<', k, ':', e.request.headers[k])
            if e.request.body:
                print('<')
                print('<', repr(e.request.body[:35]),
                      '(total {0} bytes of data)'.format(len(e.request.body)))
            print('')
            print('>', e.response.status_code, e.response.reason)
            for k in sorted(e.response.headers.keys()):
                print('>', k.title(), ':', e.response.headers[k])
            if e.response.content:
                print('>')
                print('>', repr(e.response.content[:35]),
                      '(total {0} bytes of data)'.format(
                          len(e.response.content)))
            return 1
    return with_error_handling


def _check_for_credentials(func):
    @wraps(func)
    def with_check_for_credentials(*args, **kwargs):
        has_github_token_env_var = "GITHUB_TOKEN" in os.environ
        has_netrc = requests.utils.get_netrc_auth(github_api_url())
        if (not _github_token_cli_arg
                and not has_github_token_env_var and not has_netrc):
            raise EnvironmentError(
                "This command requires credentials provided by passing "
                "--github-token CLI argument, set using GITHUB_TOKEN "
                "env. variable or using netrc file. For more details, "
                "see https://github.com/j0057/github-release#configuring")
        return func(*args, **kwargs)
    return with_check_for_credentials


def _progress_bar(*args, **kwargs):
    bar = click.progressbar(*args, **kwargs)
    bar.bar_template = " [%(bar)s] %(info)s %(label)s"
    bar.show_percent = True
    bar.show_pos = True

    def formatSize(length):
        if length == 0:
            return '%.2f' % length
        unit = ''
        # See https://en.wikipedia.org/wiki/Binary_prefix
        units = ['k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
        while True:
            if length <= 1024 or len(units) == 0:
                break
            unit = units.pop(0)
            length /= 1024.
        return '%.2f%s' % (length, unit)

    def formatPos(_self):
        pos = formatSize(_self.pos)
        if _self.length is not None:
            pos += '/%s' % formatSize(_self.length)
        return pos

    bar.format_pos = types.MethodType(formatPos, bar)
    return bar


def _recursive_gh_get(href, items):
    """Recursively get list of GitHub objects.

    See https://developer.github.com/v3/guides/traversing-with-pagination/
    """
    response = _request('GET', href)
    response.raise_for_status()
    items.extend(response.json())
    if "link" not in response.headers:
        return
    links = link_header.parse(response.headers["link"])
    rels = {link.rel: link.href for link in links.links}
    if "next" in rels:
        _recursive_gh_get(rels["next"], items)


def _validate_repo_name(ctx, param, value):
    """Callback used to check if repository argument was given."""
    if "/" not in value:
        raise click.BadParameter('Expected format for REPOSITORY is '
                                 '"<org_name>/<project_name>" (e.g "jcfr/sandbox")')
    return value


#
# CLI
#

@click.group()
@click.option("--github-token", envvar='GITHUB_TOKEN', default=None,
              help="[default: GITHUB_TOKEN env. variable]")
@click.option('--github-api-url', envvar='GITHUB_API_URL',
              default='https://api.github.com',
              help='[default: https://api.github.com]')
@click.option("--progress/--no-progress", default=True,
              help="Display progress bar (default: yes).")
def main(github_token, github_api_url, progress):
    """A CLI to easily manage GitHub releases, assets and references."""
    global progress_reporter_cls
    progress_reporter_cls.reportProgress = sys.stdout.isatty() and progress
    if progress_reporter_cls.reportProgress:
        progress_reporter_cls = _progress_bar
    global _github_token_cli_arg
    _github_token_cli_arg = github_token
    set_github_api_url(github_api_url)


@main.group("release")
@click.argument('repo_name', metavar="REPOSITORY", callback=_validate_repo_name)
@click.pass_context
@handle_http_error
def gh_release(ctx, repo_name):
    """Manage releases (list, create, delete, ...) for
    REPOSITORY (e.g jcfr/sandbox)
    """
    ctx.obj = repo_name


# 1.6.0 (deprecated): Remove this bloc
class AssetGroup(click.Group):
    def get_command(self, ctx, cmd_name):
        cmd_name = "delete" if cmd_name == "erase" else cmd_name
        return click.Group.get_command(self, ctx, cmd_name)


@main.group("asset", cls=AssetGroup)
@click.argument('repo_name', metavar="REPOSITORY", callback=_validate_repo_name)
@click.pass_context
@handle_http_error
def gh_asset(ctx, repo_name):
    """Manage release assets (upload, download, ...) for
    REPOSITORY (e.g jcfr/sandbox)
    """
    ctx.obj = repo_name


@main.group("ref")
@click.argument('repo_name', metavar="REPOSITORY", callback=_validate_repo_name)
@click.pass_context
@handle_http_error
def gh_ref(ctx, repo_name):
    """Manage references (list, create, delete, ...) for
    REPOSITORY (e.g jcfr/sandbox)
    """
    ctx.obj = repo_name


#
# General
#

def github_api_url():
    """Return GitHub API URL.

    If no URL has been set, return https://api.github.com unless the
    GITHUB_API_URL environment variable has been set.
    """
    if _github_api_url is None:
        return os.environ.get('GITHUB_API_URL', 'https://api.github.com')
    return _github_api_url


def set_github_api_url(url):
    """Set GitHub API URL.
    """
    global _github_api_url
    _github_api_url = url


#
# Releases
#

def print_release_info(release, title=None, indent=""):
    if title is None:
        title = "release '{0}' info".format(release["tag_name"])
    print(indent + title)
    indent = " " + indent
    print(indent + 'Tag name : {tag_name}'.format(**release))
    if release['name']:
        print(indent + 'Name : {name}'.format(**release))
    print(indent + 'ID : {id}'.format(**release))
    print(indent + 'Created : {created_at}'.format(**release))
    print(indent + 'URL : {html_url}'.format(**release))
    print(indent + 'Author : {login}'.format(**release['author']))
    print(indent + 'Is published : {0}'.format(not release['draft']))
    print(indent + 'Is prerelease : {0}'.format(release['prerelease']))
    if release['body']:
        print(indent + 'Release notes :')
        print(indent + release['body'])
    print('')
    for (i, asset) in enumerate(release['assets']):
        print_asset_info(i, asset, indent=indent)


def get_release_type(release):
    """Return the type of the release

    Either 'draft', 'prerelease' (no draft) or 'release' (neither)
    """
    if release['draft']:
        return 'draft'
    if release['prerelease']:
        return 'prerelease'
    return 'release'


@backoff.on_exception(backoff.expo, requests.exceptions.HTTPError, max_time=60)
def get_releases(repo_name, verbose=False):

    releases = []
    _recursive_gh_get(
        github_api_url() + '/repos/{0}/releases'.format(repo_name), releases)

    if verbose:
        list(map(print_release_info,
                 sorted(releases, key=lambda r: r['tag_name'])))
    return releases


@backoff.on_predicate(backoff.expo, lambda x: x is None, max_time=5)
def get_release(repo_name, tag_name):
    """Return release

    .. note::

        If the release is not found (e.g the release was just created and
        the GitHub response is not yet updated), this function is called again by
        leveraging the `backoff` decorator.

        See https://github.com/j0057/github-release/issues/67
    """
    releases = get_releases(repo_name)
    try:
        release = next(r for r in releases if r['tag_name'] == tag_name)
        return release
    except StopIteration:
        return None


def get_release_info(repo_name, tag_name):
    release = get_release(repo_name, tag_name)
    if release is not None:
        return release
    else:
        raise Exception('Release with tag_name {0} not found'.format(tag_name))


def _update_release_sha(repo_name, tag_name, new_release_sha, dry_run):
    """Update the commit associated with a given release tag.

    Since updating a tag commit is not directly possible, this function
    does the following steps:
    * set the release tag to ``<tag_name>-tmp`` and associate it
      with ``new_release_sha``.
    * delete tag ``refs/tags/<tag_name>``.
    * update the release tag to ``<tag_name>`` and associate it
      with ``new_release_sha``.
    """
    if new_release_sha is None:
        return
    refs = get_refs(repo_name, tags=True, pattern="refs/tags/%s" % tag_name)
    if not refs:
        return
    assert len(refs) == 1

    # If sha associated with "<tag_name>" is up-to-date, we are done.
    previous_release_sha = refs[0]["object"]["sha"]
    if previous_release_sha == new_release_sha:
        return

    tmp_tag_name = tag_name + "-tmp"

    # If any, remove leftover temporary tag "<tag_name>-tmp"
    refs = get_refs(repo_name, tags=True, pattern="refs/tags/%s" % tmp_tag_name)
    if refs:
        assert len(refs) == 1
        time.sleep(0.1)
        gh_ref_delete(repo_name,
                      "refs/tags/%s" % tmp_tag_name, dry_run=dry_run)

    # Update "<tag_name>" release by associating it with the "<tag_name>-tmp"
    # and "<new_release_sha>". It will create the temporary tag.
    time.sleep(0.1)
    patch_release(repo_name, tag_name,
                  tag_name=tmp_tag_name,
                  target_commitish=new_release_sha,
                  dry_run=dry_run)

    # Now "<tag_name>-tmp" references "<new_release_sha>", remove "<tag_name>"
    time.sleep(0.1)
    gh_ref_delete(repo_name, "refs/tags/%s" % tag_name, dry_run=dry_run)

    # Finally, update "<tag_name>-tmp" release by associating it with the
    # "<tag_name>" and "<new_release_sha>".
    time.sleep(0.1)
    patch_release(repo_name, tmp_tag_name,
                  tag_name=tag_name,
                  target_commitish=new_release_sha,
                  dry_run=dry_run)

    # ... and remove "<tag_name>-tmp"
    time.sleep(0.1)
    gh_ref_delete(repo_name,
                  "refs/tags/%s" % tmp_tag_name, dry_run=dry_run)


def patch_release(repo_name, current_tag_name, **values):
    dry_run = values.get("dry_run", False)
    verbose = values.get("verbose", False)
    release = get_release_info(repo_name, current_tag_name)
    new_tag_name = values.get("tag_name", release["tag_name"])

    _update_release_sha(
        repo_name,
        new_tag_name,
        values.get("target_commitish", None),
        dry_run
    )

    data = {
        "tag_name": release["tag_name"],
        "target_commitish": release["target_commitish"],
        "name": release["name"],
        "body": release["body"],
        "draft": release["draft"],
        "prerelease": release["prerelease"]
    }

    updated = []
    for key in data:
        if key in values and data[key] != values[key]:
            updated.append("%s: '%s' -> '%s'" % (key, data[key], values[key]))
    if updated:
        print("updating '%s' release: \n %s" % (
            current_tag_name, "\n ".join(updated)))
        print("")

    if len(values.get("body", "")) >= 125000:
        raise Exception('Failed to update release {0}. Description has {1} characters and maximum is 125000 characters'.format(
            release["tag_name"], len(values["body"])))

    data.update(values)

    if not dry_run:
        url = github_api_url() + '/repos/{0}/releases/{1}'.format(
            repo_name, release['id'])
        response = _request(
            'PATCH', url,
            data=json.dumps(data),
            headers={'Content-Type': 'application/json'})
        response.raise_for_status()

        # In case a new tag name was provided, remove the old one.
        if current_tag_name != data["tag_name"]:
            gh_ref_delete(
                repo_name, "refs/tags/%s" % current_tag_name,
                tags=True, verbose=verbose, dry_run=dry_run)


def get_assets(repo_name, tag_name, verbose=False):
    release = get_release(repo_name, tag_name)
    if not release:
        raise Exception('Release with tag_name {0} not found'.format(tag_name))

    assets = []
    _recursive_gh_get(github_api_url() + '/repos/{0}/releases/{1}/assets'.format(
        repo_name, release["id"]), assets)

    if verbose:
        for i, asset in enumerate(sorted(assets, key=lambda r: r['name'])):
            print_asset_info(i, asset)

    return assets


def get_asset_info(repo_name, tag_name, filename):
    assets = get_assets(repo_name, tag_name)
    try:
        asset = next(a for a in assets if a['name'] == filename)
        return asset
    except StopIteration:
        raise Exception('Asset with filename {0} not found in '
                        'release with tag_name {1}'.format(filename, tag_name))


@gh_release.command("list")
@click.pass_obj
def _cli_release_list(repo_name):
    """List releases"""
    return get_releases(repo_name, verbose=True)


@gh_release.command("info")
@click.argument("tag_name")
@click.pass_obj
def _cli_release_info(repo_name, tag_name):
    """Get release description"""
    release = get_release_info(repo_name, tag_name)
    print_release_info(release)


@gh_release.command("create")
@click.argument("tag_name")
@click.argument("asset_pattern", nargs=-1)
@click.option("--name")
@click.option("--body", default=None)
@click.option("--publish", is_flag=True, default=False)
@click.option("--prerelease", is_flag=True, default=False)
@click.option("--dry-run", is_flag=True, default=False)
@click.option("--target-commitish")
@click.pass_obj
def cli_release_create(*args, **kwargs):
    """Create a release"""
    gh_release_create(*args, **kwargs)


@_check_for_credentials
def gh_release_create(repo_name, tag_name, asset_pattern=None, name=None, body=None,
                      publish=False, prerelease=False,
                      target_commitish=None, dry_run=False):
    if get_release(repo_name, tag_name) is not None:
        print('release %s: already exists\n' % tag_name)
        return
    data = {
        'tag_name': tag_name,
        'draft': not publish and not prerelease,
        'prerelease': prerelease
    }
    if name is not None:
        data["name"] = name
    if body is not None:
        data["body"] = body
    if target_commitish is not None:
        data["target_commitish"] = target_commitish
    if not dry_run:
        response = _request(
            'POST', github_api_url() + '/repos/{0}/releases'.format(repo_name),
            data=json.dumps(data),
            headers={'Content-Type': 'application/json'})
        response.raise_for_status()
        print_release_info(response.json(),
                           title="created '%s' release" % tag_name)
    else:
        print("created '%s' release (dry_run)" % tag_name)
    if asset_pattern:
        gh_asset_upload(repo_name, tag_name, asset_pattern, dry_run=dry_run)


@gh_release.command("edit")
@click.argument("current_tag_name")
@click.option("--tag-name", default=None)
@click.option("--target-commitish", default=None)
@click.option("--name", default=None)
@click.option("--body", default=None)
@click.option("--draft/--publish", is_flag=True, default=None)
@click.option("--prerelease/--release", is_flag=True, default=None)
@click.option("--dry-run", is_flag=True, default=False)
@click.option("--verbose", is_flag=True, default=False)
@click.pass_obj
def _cli_release_edit(*args, **kwargs):
    """Edit a release"""
    gh_release_edit(*args, **kwargs)


@_check_for_credentials
def gh_release_edit(repo_name, current_tag_name,
                    tag_name=None, target_commitish=None, name=None,
                    body=None,
                    draft=None, prerelease=None, dry_run=False, verbose=False):
    attributes = {}
    for key in [
        "tag_name", "target_commitish", "name", "body", "draft",
        "prerelease", "dry_run", "verbose"
    ]:
        if locals().get(key, None) is not None:
            attributes[key] = locals()[key]
    patch_release(repo_name, current_tag_name, **attributes)


@gh_release.command("delete")
@click.argument("pattern")
@click.option("--keep-pattern")
@click.option("--release-type", type=click.Choice(['all', 'draft', 'prerelease', 'release']), default='all')
@click.option("--older-than", type=int, default=0)
@click.option("--dry-run", is_flag=True, default=False)
@click.option("--verbose", is_flag=True, default=False)
@click.pass_obj
def _cli_release_delete(*args, **kwargs):
    """Delete selected release"""
    gh_release_delete(*args, **kwargs)


@_check_for_credentials
def gh_release_delete(repo_name, pattern, keep_pattern=None, release_type='all', older_than=0,
                      dry_run=False, verbose=False):
    releases = get_releases(repo_name)
    candidates = []
    # Get list of candidate releases
    for release in releases:
        if not fnmatch.fnmatch(release['tag_name'], pattern):
            if verbose:
                print('skipping release {0}: do not match {1}'.format(
                    release['tag_name'], pattern))
            continue
        if keep_pattern is not None:
            if fnmatch.fnmatch(release['tag_name'], keep_pattern):
                continue
        if release_type != 'all' and release_type != get_release_type(release):
            if verbose:
                print('skipping release {0}: type {1} is not {2}'.format(
                    release['tag_name'], get_release_type(release), release_type))
            continue
        # Assumes Zulu time.
        # See https://stackoverflow.com/questions/127803/how-to-parse-an-iso-8601-formatted-date
        utc = _UTC()
        rel_date = datetime.strptime(release['created_at'], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=utc)
        rel_age = int((datetime.now(utc) - rel_date).total_seconds() / 60 / 60)  # In hours
        if older_than > rel_age:
            if verbose:
                print('skipping release {0}: created less than {1} hours ago ({2}hrs)'.format(
                    release['tag_name'], older_than, rel_age))
            continue
        candidates.append(release)
    for release in candidates:
        print('deleting release {0}'.format(release['tag_name']))
        if dry_run:
            continue
        url = (github_api_url()
               + '/repos/{0}/releases/{1}'.format(repo_name, release['id']))
        response = _request('DELETE', url)
        response.raise_for_status()
    return len(candidates) > 0


@gh_release.command("publish")
@click.argument("tag_name")
@click.option("--prerelease", is_flag=True, default=False)
@click.pass_obj
def _cli_release_publish(*args, **kwargs):
    """Publish a release setting draft to 'False'"""
    gh_release_publish(*args, **kwargs)


@_check_for_credentials
def gh_release_publish(repo_name, tag_name, prerelease=False):
    patch_release(repo_name, tag_name, draft=False, prerelease=prerelease)


@gh_release.command("unpublish")
@click.argument("tag_name")
@click.option("--prerelease", is_flag=True, default=False)
@click.pass_obj
def _cli_release_unpublish(*args, **kwargs):
    """Unpublish a release setting draft to 'True'"""
    gh_release_unpublish(*args, **kwargs)


@_check_for_credentials
def gh_release_unpublish(repo_name, tag_name, prerelease=False):
    draft = not prerelease
    patch_release(repo_name, tag_name, draft=draft, prerelease=prerelease)


@gh_release.command("release-notes")
@click.argument("tag_name")
@click.pass_obj
def _cli_release_notes(*args, **kwargs):
    """Set release notes"""
    gh_release_notes(*args, **kwargs)


@_check_for_credentials
def gh_release_notes(repo_name, tag_name):
    release = get_release_info(repo_name, tag_name)
    (_, filename) = tempfile.mkstemp(suffix='.md')
    try:
        if release['body']:
            with open(filename, 'w+b') as f:
                body = release['body']
                if sys.version_info[0] >= 3:
                    body = body.encode('utf-8')
                f.write(body)
        if 'EDITOR' not in os.environ:
            raise EnvironmentError(
                "This command requires editor set using EDITOR "
                "env. variable.")
        ret = os.system('{0} {1}'.format(os.environ['EDITOR'], filename))
        if ret:
            raise Exception(
                '{0} returned exit code {1}'.format(os.environ['EDITOR'], ret))
        with open(filename, 'rb') as f:
            body = f.read()
        if sys.version_info[0] >= 3:
            body = body.decode('utf-8')
        if release['body'] == body:
            return
        patch_release(repo_name, tag_name, body=body)
    finally:
        os.remove(filename)


@gh_release.command("debug")
@click.argument("tag_name")
@click.pass_obj
def _cli_release_debug(repo_name, tag_name):
    """Print release detailed information"""
    release = get_release_info(repo_name, tag_name)
    pprint(release)


#
# Assets
#

def print_asset_info(i, asset, indent=""):
    print(indent + "Asset #{i}".format(i=i))
    indent = " " + indent
    print(indent + "name : {name}".format(i=i, **asset))
    print(indent + "state : {state}".format(i=i, **asset))
    print(indent + "uploader : {login}".format(i=i, **asset['uploader']))
    print(indent + "size : {size}".format(i=i, **asset))
    print(indent + "URL : {browser_download_url}".format(i=i, **asset))
    print(indent + "Downloads : {download_count}".format(i=i, **asset))
    print("")


@gh_asset.command("upload")
@click.argument("tag_name")
@click.argument("pattern", nargs=-1)
@click.pass_obj
def _cli_asset_upload(*args, **kwargs):
    """Upload release assets"""
    gh_asset_upload(*args, **kwargs)


class _ProgressFileReader(object):
    """Wrapper used to capture File IO read progress."""
    def __init__(self, stream, reporter):
        self._stream = stream
        self._reporter = reporter

    def read(self, _size):
        _chunk = self._stream.read(_size)
        self._reporter.update(len(_chunk))
        return _chunk

    def __getattr__(self, attr):
        return getattr(self._stream, attr)


def _upload_release_file(
        repo_name, tag_name, upload_url, filename,
        verbose=False, dry_run=False, retry=True):
    already_uploaded = False
    uploaded = False
    basename = os.path.basename(filename)
    # Sanity checks
    assets = get_assets(repo_name, tag_name)
    download_url = None
    for asset in assets:
        if asset["name"] == basename:
            if asset["state"] == "uploaded":
                download_url = asset["browser_download_url"]
                break
            # Remove asset that failed to upload
            # See https://developer.github.com/v3/repos/releases/#response-for-upstream-failure  # noqa: E501
            if asset["state"] == "new":
                print(" deleting %s (invalid asset "
                      "with state set to 'new')" % asset['name'])
                url = (
                    github_api_url()
                    + '/repos/{0}/releases/assets/{1}'.format(
                        repo_name, asset['id'])
                )
                response = _request('DELETE', url)
                response.raise_for_status()

    print(" uploading %s" % filename)

    # Skip if an asset with same name has already been uploaded
    # Trying to upload would give a HTTP error 422
    if download_url:
        already_uploaded = True
        print(" skipping (asset with same name already exists)")
        print(" download_url: %s" % download_url)
        print("")
        return already_uploaded, uploaded, {}
    if dry_run:
        uploaded = True
        print(" download_url: Unknown (dry_run)")
        print("")
        return already_uploaded, uploaded, {}

    url = '{0}?name={1}'.format(upload_url, basename)
    if verbose and not progress_reporter_cls.reportProgress:
        print(" upload_url: %s" % url)
    file_size = os.path.getsize(filename)

    # Attempt upload
    with open(filename, 'rb') as f:
        with progress_reporter_cls(
                label=basename, length=file_size) as reporter:
            response = _request(
                'POST', url,
                headers={'Content-Type': 'application/octet-stream'},
                data=_ProgressFileReader(f, reporter))
            data = response.json()

    if response.status_code == 502 and retry:
        print(" retrying (upload failed with status_code=502)")
        already_uploaded, uploaded, data = _upload_release_file(
            repo_name, tag_name, upload_url, filename,
            verbose=verbose, retry=False)
    else:
        response.raise_for_status()
        asset = data
        print(" download_url: %s" % asset["browser_download_url"])
        print("")
        uploaded = True
    return already_uploaded, uploaded, response.json()


@_check_for_credentials
def gh_asset_upload(repo_name, tag_name, pattern, dry_run=False, verbose=False):
    if not dry_run:
        upload_url = get_release_info(repo_name, tag_name)["upload_url"]
        if "{" in upload_url:
            upload_url = upload_url[:upload_url.index("{")]
    else:
        upload_url = "unknown"

    # Raise exception if no token is specified AND netrc file is found
    # BUT only api.github.com is specified. See #17
    has_github_token = "GITHUB_TOKEN" in os.environ
    has_netrc = requests.utils.get_netrc_auth(github_api_url())
    if not has_github_token and has_netrc:
        if requests.utils.get_netrc_auth(upload_url) is None:
            raise EnvironmentError(
                "Found netrc file but upload URL is missing. "
                "For more details, "
                "see https://github.com/j0057/github-release#configuring")

    if type(pattern) in [list, tuple]:
        filenames = []
        for package in pattern:
            filenames.extend(glob.glob(package))
        filenames = set(filenames)
    elif pattern:
        filenames = glob.glob(pattern)
    else:
        filenames = []

    if len(filenames) > 0:
        print("uploading '%s' release asset(s) "
              "(found %s):" % (tag_name, len(filenames)))

    uploaded = False
    already_uploaded = False

    for filename in filenames:
        already_uploaded, uploaded, _ = _upload_release_file(
            repo_name, tag_name, upload_url, filename, verbose, dry_run)

    if not uploaded and not already_uploaded:
        print("skipping upload of '%s' release assets ("
              "no files match pattern(s): %s)" % (tag_name, pattern))
        print("")


@gh_asset.command("delete")
@click.argument("tag_name")
@click.argument("pattern")
@click.option("--keep-pattern", default=None)
@click.option("--dry-run", is_flag=True, default=False)
@click.option("--verbose", is_flag=True, default=False)
@click.pass_obj
def _cli_asset_delete(*args, **kwargs):
    """Delete selected release assets"""
    gh_asset_delete(*args, **kwargs)


@_check_for_credentials
def gh_asset_delete(repo_name, tag_name, pattern,
                    keep_pattern=None, dry_run=False, verbose=False):
    # Get assets
    assets = get_assets(repo_name, tag_name)
    # List of assets
    excluded_assets = {}
    matched_assets = []
    matched_assets_to_keep = {}
    for asset in assets:
        if not fnmatch.fnmatch(asset['name'], pattern):
            skip_reason = "do NOT match pattern '%s'" % pattern
            excluded_assets[asset['name']] = skip_reason
            continue
        matched_assets.append(asset)
        if keep_pattern is not None:
            if fnmatch.fnmatch(asset['name'], keep_pattern):
                skip_reason = "match keep_pattern '%s'" % keep_pattern
                matched_assets_to_keep[asset['name']] = skip_reason
                continue
    # Summary
    summary = "matched: %s, matched-but-keep: %s, not-matched: %s" % (
        len(matched_assets),
        len(matched_assets_to_keep),
        len(excluded_assets)
    )
    print("deleting '%s' release asset(s) (%s):" % (tag_name, summary))
    # Perform deletion
    for asset in matched_assets:
        if asset['name'] in matched_assets_to_keep:
            if verbose:
                skip_reason = matched_assets_to_keep[asset['name']]
                print(" skipping %s (%s)" % (asset['name'], skip_reason))
            continue
        print(" deleting %s" % asset['name'])
        if dry_run:
            continue
        url = (
            github_api_url()
            + '/repos/{0}/releases/assets/{1}'.format(repo_name, asset['id'])
        )
        response = _request('DELETE', url)
        response.raise_for_status()
    if len(matched_assets) == 0:
        print(" nothing to delete")
    print("")
    if verbose:
        indent = " "
        print(indent + "assets NOT matching selection pattern [%s]:" % pattern)
        for asset_name in excluded_assets:
            print(indent + " " + asset_name)
        print("")


@gh_asset.command("download")
@click.argument("tag_name")
@click.argument("pattern", required=False)
@click.pass_obj
def _cli_asset_download(*args, **kwargs):
    """Download release assets"""
    gh_asset_download(*args, **kwargs)


def _download_file(repo_name, asset):
    response = _request(
        method='GET',
        url=github_api_url() + '/repos/{0}/releases/assets/{1}'.format(
            repo_name, asset['id']),
        allow_redirects=False,
        headers={'Accept': 'application/octet-stream'},
        stream=True)
    while response.status_code == 302:
        response = _request(
            'GET', response.headers['Location'], allow_redirects=False,
            stream=True,
            with_auth=False
        )
    with open(asset['name'], 'w+b') as f:
        with progress_reporter_cls(
                label=asset['name'], length=asset['size']) as reporter:
            for chunk in response.iter_content(chunk_size=REQ_BUFFER_SIZE):
                reporter.update(len(chunk))
                f.write(chunk)


def gh_asset_download(repo_name, tag_name=None, pattern=None):
    releases = get_releases(repo_name)
    downloaded = 0
    for release in releases:
        if tag_name and not fnmatch.fnmatch(release['tag_name'], tag_name):
            continue
        for asset in release['assets']:
            if pattern and not fnmatch.fnmatch(asset['name'], pattern):
                continue
            if os.path.exists(asset['name']):
                absolute_path = os.path.abspath(asset['name'])
                print('release {0}: '
                      'skipping {1}: '
                      'found {2}'.format(
                          release['tag_name'], asset['name'], absolute_path))
                continue
            print('release {0}: '
                  'downloading {1}'.format(release['tag_name'], asset['name']))
            _download_file(repo_name, asset)
            downloaded += 1
    return downloaded


@gh_asset.command("list")
@click.argument("tag_name")
@click.pass_obj
def _cli_asset_list(repo_name, tag_name):
    """List release assets"""
    return get_assets(repo_name, tag_name, verbose=True)


#
# References
#

def print_object_info(ref_object, indent=""):
    print(indent + 'Object')
    print(indent + ' type : {type}'.format(**ref_object))
    print(indent + ' sha : {sha}'.format(**ref_object))


def print_ref_info(ref, indent=""):
    print(indent + "Reference '{ref}'".format(**ref))
    print_object_info(ref['object'], indent=" " + indent)
    print("")


def get_refs(repo_name, tags=None, pattern=None):

    refs = []
    _recursive_gh_get(
        github_api_url() + '/repos/{0}/git/refs'.format(repo_name), refs)

    # If "tags" is True, keep only "refs/tags/*"
    data = refs
    if tags:
        tag_names = []
        data = []
        for ref in refs:
            if ref['ref'].startswith("refs/tags"):
                data.append(ref)
                tag_names.append(ref["ref"])

        try:
            tags = []
            _recursive_gh_get(
                github_api_url() + '/repos/{0}/git/refs/tags'.format(repo_name), tags)
            for ref in tags:
                if ref["ref"] not in tag_names:
                    data.append(ref)
        except requests.exceptions.HTTPError as exc_info:
            response = exc_info.response
            if response.status_code != 404:
                raise

    # If "pattern" is not None, select only matching references
    filtered_data = data
    if pattern is not None:
        filtered_data = []
        for ref in data:
            if fnmatch.fnmatch(ref['ref'], pattern):
                filtered_data.append(ref)

    return filtered_data


@gh_ref.command("list")
@click.option("--tags", is_flag=True, default=False)
@click.option("--pattern", default=None)
@click.option("--verbose", is_flag=True, default=False)
@click.pass_obj
def _cli_ref_list(*args, **kwargs):
    """List all references"""
    gh_ref_list(*args, **kwargs)


def gh_ref_list(repo_name, tags=None, pattern=None, verbose=False):
    refs = get_refs(repo_name, tags=tags, pattern=pattern)
    sorted_refs = sorted(refs, key=lambda r: r['ref'])
    if verbose:
        list(map(print_ref_info, sorted_refs))
    else:
        list(map(lambda ref: print(ref['ref']), sorted_refs))
    return sorted_refs


@gh_ref.command("create")
@click.argument("reference")
@click.argument("sha")
@click.pass_obj
def _cli_ref_create(*args, **kwargs):
    """Create reference (e.g heads/foo, tags/foo)"""
    gh_ref_create(*args, **kwargs)


@_check_for_credentials
def gh_ref_create(repo_name, reference, sha):
    data = {
        'ref': "refs/%s" % reference,
        'sha': sha
    }
    response = _request(
        'POST', github_api_url() + '/repos/{0}/git/refs'.format(repo_name),
        data=json.dumps(data),
        headers={'Content-Type': 'application/json'})
    response.raise_for_status()
    print_ref_info(response.json())


@gh_ref.command("delete")
@click.argument("pattern")
@click.option("--keep-pattern", default=None)
@click.option("--tags", is_flag=True, default=False)
@click.option("--dry-run", is_flag=True, default=False)
@click.option("--verbose", is_flag=True, default=False)
@click.pass_obj
def _cli_ref_delete(*args, **kwargs):
    """Delete selected references"""
    gh_ref_delete(*args, **kwargs)


@_check_for_credentials
def gh_ref_delete(repo_name, pattern, keep_pattern=None, tags=False,
                  dry_run=False, verbose=False):
    removed_refs = []
    refs = get_refs(repo_name, tags=tags)
    for ref in refs:
        if not fnmatch.fnmatch(ref['ref'], pattern):
            if verbose:
                print('skipping reference {0}: '
                      'do not match {1}'.format(ref['ref'], pattern))
            continue
        if keep_pattern is not None:
            if fnmatch.fnmatch(ref['ref'], keep_pattern):
                continue
        print('deleting reference {0}'.format(ref['ref']))
        removed_refs.append(ref['ref'])
        if dry_run:
            continue
        response = _request(
            'DELETE',
            github_api_url() + '/repos/{0}/git/{1}'.format(repo_name, ref['ref']))
        response.raise_for_status()
    return len(removed_refs) > 0


#
# Commits
#

def gh_commit_get(repo_name, sha):
    try:
        response = _request(
            'GET',
            github_api_url() + '/repos/{0}/git/commits/{1}'.format(repo_name, sha))
        response.raise_for_status()
        return response.json()
    except requests.exceptions.HTTPError as exc_info:
        response = exc_info.response
        if response.status_code == 404:
            return None
        else:
            raise


#
# Script entry point
#

if __name__ == '__main__':
    main()
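
For reference, a minimal usage sketch of the vendored module added by this diff, assuming the autopub 0.4.0 wheel is installed and a GITHUB_TOKEN environment variable is set; the repository name, tag, and asset pattern below are hypothetical placeholders, not values taken from the package:

    # Sketch only: exercises the vendored helpers shown in the diff above.
    from autopub.vendor import github_release as ghr

    # Create and publish a release for a hypothetical repository and tag
    # (publish=True means the release is not left as a draft).
    ghr.gh_release_create("example-org/example-repo", "0.4.0", publish=True)

    # Upload any wheels found in dist/ as assets of that release.
    ghr.gh_asset_upload("example-org/example-repo", "0.4.0", "dist/*.whl")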