@semantic-release/github 4.4.0 → 5.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/definitions/rate-limit.js +27 -0
- package/lib/fail.js +8 -2
- package/lib/get-client.js +46 -43
- package/lib/glob-assets.js +28 -7
- package/lib/publish.js +12 -6
- package/lib/resolve-config.js +8 -15
- package/lib/success.js +13 -5
- package/lib/verify.js +6 -2
- package/package.json +7 -3

package/lib/definitions/rate-limit.js
ADDED

@@ -0,0 +1,27 @@
+/**
+ * Default exponential backoff configuration for retries.
+ */
+const RETRY_CONF = {retries: 3, factor: 2, minTimeout: 1000};
+
+/**
+ * Rate limit per API endpoints.
+ *
+ * See {@link https://developer.github.com/v3/search/#rate-limit|Search API rate limit}.
+ * See {@link https://developer.github.com/v3/#rate-limiting|Rate limiting}.
+ */
+const RATE_LIMITS = {
+  search: ((60 * 1000) / 30) * 1.1, // 30 calls per minutes => 1 call every 2s + 10% safety margin
+  core: {
+    read: ((60 * 60 * 1000) / 5000) * 1.1, // 5000 calls per hour => 1 call per 720ms + 10% safety margin
+    write: 3000, // 1 call every 3 seconds
+  },
+};
+
+/**
+ * Global rate limit to prevent abuse.
+ *
+ * See {@link https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits|Dealing with abuse rate limits}
+ */
+const GLOBAL_RATE_LIMIT = 1000;
+
+module.exports = {RETRY_CONF, RATE_LIMITS, GLOBAL_RATE_LIMIT};
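
For orientation, a minimal stand-alone sketch of how these values are meant to be consumed with Bottleneck (already a dependency of this package); `searchIssues` is a hypothetical stand-in for an Octokit search call, not part of the package:

  const Bottleneck = require('bottleneck');
  const {RATE_LIMITS, GLOBAL_RATE_LIMIT} = require('./definitions/rate-limit');

  // One global limiter (1 call per second) shared by every request,
  // chained with a per-endpoint limiter (Search API: ~1 call every 2.2s).
  const globalThrottler = new Bottleneck({minTime: GLOBAL_RATE_LIMIT});
  const searchThrottler = new Bottleneck({minTime: RATE_LIMITS.search}).chain(globalThrottler);

  // Hypothetical stand-in for an Octokit search request.
  const searchIssues = async q => `searched ${q}`;

  searchThrottler.wrap(searchIssues)('repo:owner/repo type:issue').then(console.log);
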
package/lib/fail.js
CHANGED

@@ -7,9 +7,15 @@ const getClient = require('./get-client');
 const findSRIssues = require('./find-sr-issues');
 const getFailComment = require('./get-fail-comment');
 
-module.exports = async (pluginConfig, {options: {branch, repositoryUrl}, errors, logger}) => {
+module.exports = async (pluginConfig, context) => {
+  const {
+    options: {branch, repositoryUrl},
+    errors,
+    logger,
+  } = context;
   const {githubToken, githubUrl, githubApiPathPrefix, proxy, failComment, failTitle, labels, assignees} = resolveConfig(
-    pluginConfig
+    pluginConfig,
+    context
   );
   const {name: repo, owner} = parseGithubUrl(repositoryUrl);
   const github = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});
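
The same `(pluginConfig, context)` signature change is applied to publish, success and verify below. A hedged sketch of the context object these steps now destructure (shape inferred from the diffs on this page; every value is a placeholder):

  // Placeholder context, mirroring what semantic-release core passes to each plugin step.
  const context = {
    cwd: process.cwd(),
    env: process.env,
    options: {branch: 'master', repositoryUrl: 'https://github.com/owner/repo.git'},
    nextRelease: {gitTag: 'v1.0.0', notes: 'Release notes'},
    errors: [],
    logger: console,
  };

  // Each step is now invoked as `step(pluginConfig, context)`, for example:
  // require('@semantic-release/github/lib/fail')({failTitle: 'The automated release is failing 🚨'}, context);
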
package/lib/get-client.js
CHANGED

@@ -1,5 +1,5 @@
 const url = require('url');
-const {memoize} = require('lodash');
+const {memoize, get} = require('lodash');
 const Octokit = require('@octokit/rest');
 const pRetry = require('p-retry');
 const Bottleneck = require('bottleneck');

@@ -7,28 +7,8 @@ const urljoin = require('url-join');
 const HttpProxyAgent = require('http-proxy-agent');
 const HttpsProxyAgent = require('https-proxy-agent');
 
-/**
- * Default exponential backoff configuration for retries.
- */
-const DEFAULT_RETRY = {retries: 3, factor: 2, minTimeout: 1000};
-
-/**
- * Rate limit per API endpoints.
- *
- * See {@link https://developer.github.com/v3/search/#rate-limit|Search API rate limit}.
- * See {@link https://developer.github.com/v3/#rate-limiting|Rate limiting}.
- */
-const RATE_LIMITS = {
-  search: (60 * 1000) / 30, // 30 calls per minutes => 1 call per 2s
-  core: (60 * 60 * 1000) / 5000, // 5000 calls per hour => 1 call per 720ms
-};
-
-/**
- * Global rate limit to prevent abuse.
- *
- * See {@link https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits|Dealing with abuse rate limits}
- */
-const GLOBAL_RATE_LIMIT = 1000;
+const GH_ROUTES = require('@octokit/rest/lib/routes');
+const {RETRY_CONF, RATE_LIMITS, GLOBAL_RATE_LIMIT} = require('./definitions/rate-limit');
 
 /**
  * Http error codes for which to not retry.

@@ -41,35 +21,66 @@ const SKIP_RETRY_CODES = [400, 401, 403];
  * @param {Array} rate The rate limit group.
  * @param {String} limit The rate limits per API endpoints.
  * @param {Bottleneck} globalThrottler The global throttler.
+ *
  * @return {Bottleneck} The throller function for the given rate limit group.
  */
-const getThrottler = memoize((rate,
-  new Bottleneck({minTime:
+const getThrottler = memoize((rate, globalThrottler) =>
+  new Bottleneck({minTime: get(RATE_LIMITS, rate)}).chain(globalThrottler)
 );
 
+/**
+ * Determine if a call to a client function will trigger a read (`GET`) or a write (`POST`, `PATCH`, etc...) request.
+ *
+ * @param {String} endpoint The client API enpoint (for example the endpoint for a call to `github.repos.get` is `repos`).
+ * @param {String} command The client API command (for example the command for a call to `github.repos.get` is `get`).
+ *
+ * @return {String} `write` or `read` if there is rate limit configuration for this `endpoint` and `command`, `undefined` otherwise.
+ */
+const getAccess = (endpoint, command) => {
+  const method = GH_ROUTES[endpoint] && GH_ROUTES[endpoint][command] && GH_ROUTES[endpoint][command].method;
+  const access = method && method === 'GET' ? 'read' : 'write';
+  return RATE_LIMITS[endpoint][access] && access;
+};
+
+/**
+ * Get the limiter identifier associated with a client API call.
+ *
+ * @param {String} endpoint The client API enpoint (for example the endpoint for a call to `github.repos.get` is `repos`).
+ * @param {String} command The client API command (for example the command for a call to `github.repos.get` is `get`).
+ *
+ * @return {String} A string identifying the limiter to use for this `endpoint` and `command` (e.g. `search` or `core.write`).
+ */
+const getLimitKey = (endpoint, command) => {
+  return endpoint
+    ? [endpoint, RATE_LIMITS[endpoint] && getAccess(endpoint, command)].filter(Boolean).join('.')
+    : RATE_LIMITS[command]
+      ? command
+      : 'core';
+};
+
 /**
  * Create a`handler` for a `Proxy` wrapping an Octokit instance to:
  * - Recursively wrap the child objects of the Octokit instance in a `Proxy`
  * - Throttle and retry the Octokit instance functions
  *
- * @param {Object} retry The configuration to pass to `p-retry`.
- * @param {Array} limit The rate limits per API endpoints.
  * @param {Throttler} globalThrottler The throller function for the global rate limit.
- * @param {String}
+ * @param {String} limitKey The key to find the limit rate for the API endpoint and method.
+ *
  * @return {Function} The `handler` for a `Proxy` wrapping an Octokit instance.
  */
-const handler = (retry, limit, globalThrottler, endpoint) => ({
+const handler = (globalThrottler, limitKey) => ({
   /**
    * If the target has the property as own, determine the rate limit based on the property name and recursively wrap the value in a `Proxy`. Otherwise returns the property value.
    *
    * @param {Object} target The target object.
    * @param {String} name The name of the property to get.
    * @param {Any} receiver The `Proxy` object.
+   *
    * @return {Any} The property value or a `Proxy` of the property value.
    */
   get: (target, name, receiver) =>
     Reflect.apply(Object.prototype.hasOwnProperty, target, [name])
-      ? new Proxy(target[name], handler(
+      ? new Proxy(target[name], handler(globalThrottler, getLimitKey(limitKey, name)))
       : Reflect.get(target, name, receiver),
 
   /**

@@ -78,11 +89,11 @@ const handler = (retry, limit, globalThrottler, endpoint) => ({
    * @param {Function} func The target function.
    * @param {Any} that The this argument for the call.
    * @param {Array} args The list of arguments for the call.
+   *
    * @return {Promise<Any>} The result of the function called.
    */
   apply: (func, that, args) => {
-    const throttler = getThrottler(
-
+    const throttler = getThrottler(limitKey, globalThrottler);
     return pRetry(async () => {
       try {
         return await throttler.wrap(func)(...args);

@@ -92,19 +103,11 @@ const handler = (retry, limit, globalThrottler, endpoint) => ({
         }
         throw err;
       }
-    }, retry);
+    }, RETRY_CONF);
   },
 });
 
-module.exports = ({
-  githubToken,
-  githubUrl,
-  githubApiPathPrefix,
-  proxy,
-  retry = DEFAULT_RETRY,
-  limit = RATE_LIMITS,
-  globalLimit = GLOBAL_RATE_LIMIT,
-}) => {
+module.exports = ({githubToken, githubUrl, githubApiPathPrefix, proxy} = {}) => {
   const baseUrl = githubUrl && urljoin(githubUrl, githubApiPathPrefix);
   const github = new Octokit({
     baseUrl,

@@ -115,5 +118,5 @@ module.exports = ({
       : undefined,
   });
   github.authenticate({type: 'token', token: githubToken});
-  return new Proxy(github, handler(
+  return new Proxy(github, handler(new Bottleneck({minTime: GLOBAL_RATE_LIMIT})));
 };
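
A simplified, stand-alone sketch of the throttle-and-retry wrapping that the Proxy applies to every Octokit call; `createRelease` here is a hypothetical stand-in, not the real Octokit method:

  const Bottleneck = require('bottleneck');
  const pRetry = require('p-retry');
  const {RETRY_CONF, RATE_LIMITS, GLOBAL_RATE_LIMIT} = require('./definitions/rate-limit');

  // Core write calls are limited to 1 every 3 seconds, on top of the global 1-per-second limit.
  const globalThrottler = new Bottleneck({minTime: GLOBAL_RATE_LIMIT});
  const coreWriteThrottler = new Bottleneck({minTime: RATE_LIMITS.core.write}).chain(globalThrottler);

  // Hypothetical stand-in for an Octokit call such as github.repos.createRelease.
  const createRelease = async params => ({ok: true, params});

  // Throttle the call, then retry it with the exponential backoff from RETRY_CONF.
  const throttledWithRetry = (...args) => pRetry(() => coreWriteThrottler.wrap(createRelease)(...args), RETRY_CONF);

  throttledWithRetry({owner: 'o', repo: 'r', tag_name: 'v1.0.0'}).then(console.log);
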
package/lib/glob-assets.js
CHANGED

@@ -1,25 +1,44 @@
+const path = require('path');
 const {basename} = require('path');
-const {isPlainObject, castArray, uniqWith} = require('lodash');
+const {isPlainObject, castArray, uniqWith, uniq} = require('lodash');
+const dirGlob = require('dir-glob');
 const globby = require('globby');
 const debug = require('debug')('semantic-release:github');
 
-module.exports = async assets =>
+const filesTransform = (files, cwd, transform) =>
+  files.map(file => `${file.startsWith('!') ? '!' : ''}${transform(cwd, file.startsWith('!') ? file.slice(1) : file)}`);
+
+module.exports = async ({cwd}, assets) =>
   uniqWith(
     []
       .concat(
         ...(await Promise.all(
           assets.map(async asset => {
             // Wrap single glob definition in Array
-            const glob = castArray(isPlainObject(asset) ? asset.path : asset);
+            let glob = castArray(isPlainObject(asset) ? asset.path : asset);
+            // TODO Temporary workaround for https://github.com/kevva/dir-glob/issues/7 and https://github.com/mrmlnc/fast-glob/issues/47
+            glob = uniq([
+              ...filesTransform(await dirGlob(filesTransform(glob, cwd, path.resolve)), cwd, path.relative),
+              ...glob,
+            ]);
+
             // Skip solo negated pattern (avoid to include every non js file with `!**/*.js`)
             if (glob.length <= 1 && glob[0].startsWith('!')) {
               debug(
-                'skipping the negated glob %o as its alone in its group and would retrieve a large amount of files
+                'skipping the negated glob %o as its alone in its group and would retrieve a large amount of files',
                 glob[0]
               );
               return [];
             }
-            const globbed = await globby(glob, {expandDirectories: true, gitignore: false, dot: true, onlyFiles: false});
+
+            const globbed = await globby(glob, {
+              cwd,
+              expandDirectories: true,
+              gitignore: false,
+              dot: true,
+              onlyFiles: false,
+            });
+
             if (isPlainObject(asset)) {
               if (globbed.length > 1) {
                 // If asset is an Object with a glob the `path` property that resolve to multiple files,

@@ -27,17 +46,19 @@ module.exports = async assets =>
                 // - `path` of the matched file
                 // - `name` based on the actual file name (to avoid assets with duplicate `name`)
                 // - other properties of the original asset definition
-                return globbed.map(file =>
+                return globbed.map(file => ({...asset, path: file, name: basename(file)}));
               }
               // If asset is an Object, output an Object definition with:
               // - `path` of the matched file if there is one, or the original `path` definition (will be considered as a missing file)
               // - other properties of the original asset definition
-              return
+              return {...asset, path: globbed[0] || asset.path};
             }
+
             if (globbed.length > 0) {
               // If asset is a String definition, output each files matched
               return globbed;
             }
+
             // If asset is a String definition but no match is found, output the elements of the original glob (each one will be considered as a missing file)
             return glob;
           })
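
A hedged usage sketch of the new glob-assets signature, which expands asset globs relative to an explicit `cwd` instead of the process working directory (the paths and `label` value are placeholders):

  const globAssets = require('@semantic-release/github/lib/glob-assets');

  globAssets({cwd: process.cwd()}, [
    'dist/**/*.tgz',
    {path: 'coverage/lcov.info', label: 'Coverage report'},
  ]).then(files => console.log(files));
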
package/lib/publish.js
CHANGED

@@ -1,4 +1,4 @@
-const {basename, extname} = require('path');
+const {basename, extname, resolve} = require('path');
 const {stat, readFile} = require('fs-extra');
 const {isPlainObject} = require('lodash');
 const parseGithubUrl = require('parse-github-url');

@@ -8,8 +8,14 @@ const globAssets = require('./glob-assets.js');
 const resolveConfig = require('./resolve-config');
 const getClient = require('./get-client');
 
-module.exports = async (pluginConfig, {options: {branch, repositoryUrl}, nextRelease: {gitTag, notes}, logger}) => {
-  const {githubToken, githubUrl, githubApiPathPrefix, proxy, assets} = resolveConfig(pluginConfig);
+module.exports = async (pluginConfig, context) => {
+  const {
+    cwd,
+    options: {branch, repositoryUrl},
+    nextRelease: {gitTag, notes},
+    logger,
+  } = context;
+  const {githubToken, githubUrl, githubApiPathPrefix, proxy, assets} = resolveConfig(pluginConfig, context);
   const {name: repo, owner} = parseGithubUrl(repositoryUrl);
   const github = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});
   const release = {owner, repo, tag_name: gitTag, name: gitTag, target_commitish: branch, body: notes}; // eslint-disable-line camelcase

@@ -25,7 +31,7 @@ module.exports = async (pluginConfig, {options: {branch, repositoryUrl}, nextRel
   logger.log('Published GitHub release: %s', url);
 
   if (assets && assets.length > 0) {
-    const globbedAssets = await globAssets(assets);
+    const globbedAssets = await globAssets(context, assets);
     debug('globed assets: %o', globbedAssets);
 
     await Promise.all(

@@ -34,7 +40,7 @@ module.exports = async (pluginConfig, {options: {branch, repositoryUrl}, nextRel
         let file;
 
         try {
-          file = await stat(filePath);
+          file = await stat(resolve(cwd, filePath));
         } catch (err) {
           logger.error('The asset %s cannot be read, and will be ignored.', filePath);
           return;

@@ -47,7 +53,7 @@ module.exports = async (pluginConfig, {options: {branch, repositoryUrl}, nextRel
         const fileName = asset.name || basename(filePath);
         const upload = {
           url: uploadUrl,
-          file: await readFile(filePath),
+          file: await readFile(resolve(cwd, filePath)),
           contentType: mime.getType(extname(fileName)) || 'text/plain',
           contentLength: file.size,
           name: fileName,
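
For reference, a hedged sketch of how the `assets` option reaches this publish step from a shareable configuration (shown here as a release.config.js; the globs are placeholders):

  module.exports = {
    publish: [
      '@semantic-release/npm',
      {
        path: '@semantic-release/github',
        assets: ['dist/**/*.tgz', {path: 'coverage/lcov.info', label: 'Coverage report'}],
      },
    ],
  };
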
package/lib/resolve-config.js
CHANGED

@@ -1,20 +1,13 @@
 const {isUndefined, castArray} = require('lodash');
 
-module.exports = ({
-  githubUrl,
-  githubApiPathPrefix,
-  proxy,
-  assets,
-  successComment,
-  failTitle,
-  failComment,
-  labels,
-  assignees,
-}) => ({
-  githubToken: process.env.GH_TOKEN || process.env.GITHUB_TOKEN,
-  githubUrl: githubUrl || process.env.GH_URL || process.env.GITHUB_URL,
-  githubApiPathPrefix: githubApiPathPrefix || process.env.GH_PREFIX || process.env.GITHUB_PREFIX || '',
-  proxy: proxy || process.env.HTTP_PROXY,
+module.exports = (
+  {githubUrl, githubApiPathPrefix, proxy, assets, successComment, failTitle, failComment, labels, assignees},
+  {env}
+) => ({
+  githubToken: env.GH_TOKEN || env.GITHUB_TOKEN,
+  githubUrl: githubUrl || env.GH_URL || env.GITHUB_URL,
+  githubApiPathPrefix: githubApiPathPrefix || env.GH_PREFIX || env.GITHUB_PREFIX || '',
+  proxy: proxy || env.HTTP_PROXY,
   assets: assets ? castArray(assets) : assets,
   successComment,
   failTitle: isUndefined(failTitle) || failTitle === false ? 'The automated release is failing 🚨' : failTitle,
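
A hedged sketch of the effect of this change: the environment now comes from `context.env` rather than `process.env`, so the token and proxy are resolved from whatever environment semantic-release was given (all values are placeholders):

  const resolveConfig = require('@semantic-release/github/lib/resolve-config');

  const config = resolveConfig(
    {githubUrl: 'https://github.example.com', assets: 'dist/**'},
    {env: {GH_TOKEN: '<token>', HTTP_PROXY: 'http://proxy.example.com:8080'}}
  );

  console.log(config.githubToken); // '<token>'
  console.log(config.proxy); // 'http://proxy.example.com:8080'
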
package/lib/success.js
CHANGED

@@ -10,11 +10,19 @@ const getSearchQueries = require('./get-search-queries');
 const getSuccessComment = require('./get-success-comment');
 const findSRIssues = require('./find-sr-issues');
 
-module.exports = async (
-  pluginConfig,
-  {options: {branch, repositoryUrl}, lastRelease, commits, nextRelease, releases, logger}
-) => {
-  const {githubToken, githubUrl, githubApiPathPrefix, proxy, successComment, failTitle} = resolveConfig(pluginConfig);
+module.exports = async (pluginConfig, context) => {
+  const {
+    options: {branch, repositoryUrl},
+    lastRelease,
+    commits,
+    nextRelease,
+    releases,
+    logger,
+  } = context;
+  const {githubToken, githubUrl, githubApiPathPrefix, proxy, successComment, failTitle} = resolveConfig(
+    pluginConfig,
+    context
+  );
   const {name: repo, owner} = parseGithubUrl(repositoryUrl);
   const github = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});
   const parser = issueParser('github', githubUrl ? {hosts: [githubUrl]} : {});
package/lib/verify.js
CHANGED

@@ -23,8 +23,12 @@ const VALIDATORS = {
   assignees: isArrayOf(isStringOrStringArray),
 };
 
-module.exports = async (pluginConfig, {options: {repositoryUrl}, logger}) => {
-  const {githubToken, githubUrl, githubApiPathPrefix, proxy, ...options} = resolveConfig(pluginConfig);
+module.exports = async (pluginConfig, context) => {
+  const {
+    options: {repositoryUrl},
+    logger,
+  } = context;
+  const {githubToken, githubUrl, githubApiPathPrefix, proxy, ...options} = resolveConfig(pluginConfig, context);
 
   const errors = Object.entries({...options, proxy}).reduce(
     (errors, [option, value]) =>
package/package.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "@semantic-release/github",
   "description": "Set of semantic-release plugins for publishing a GitHub release",
-  "version": "4.4.0",
+  "version": "5.0.1",
   "author": "Pierre Vanduynslager (https://twitter.com/@pvdlg_)",
   "bugs": {
     "url": "https://github.com/semantic-release/github/issues"

@@ -21,7 +21,8 @@
     "aggregate-error": "^1.0.0",
     "bottleneck": "^2.0.1",
     "debug": "^3.1.0",
-    "
+    "dir-glob": "^2.0.0",
+    "fs-extra": "^7.0.0",
     "globby": "^8.0.0",
     "http-proxy-agent": "^2.1.0",
     "https-proxy-agent": "^2.2.1",

@@ -35,7 +36,7 @@
   },
   "devDependencies": {
     "ava": "^0.25.0",
-    "clear-module": "^
+    "clear-module": "^3.0.0",
     "codecov": "^3.0.0",
     "commitizen": "^2.9.6",
     "cz-conventional-changelog": "^2.0.0",

@@ -82,6 +83,9 @@
     ],
     "all": true
   },
+  "peerDependencies": {
+    "semantic-release": ">=15.8.0 <16.0.0"
+  },
   "prettier": {
     "printWidth": 120,
     "trailingComma": "es5"