@semantic-release/github 8.0.7 → 8.0.9
This diff reflects the content of publicly available package versions as published to one of the supported public registries. It is provided for informational purposes only and shows the changes between the two versions as they appear in those registries.
- package/lib/add-channel.js +4 -4
- package/lib/definitions/retry.js +10 -0
- package/lib/definitions/throttle.js +7 -0
- package/lib/fail.js +14 -6
- package/lib/find-sr-issues.js +2 -2
- package/lib/get-client.js +9 -48
- package/lib/publish.js +11 -5
- package/lib/semantic-release-octokit.js +35 -0
- package/lib/success.js +33 -17
- package/lib/verify.js +5 -5
- package/package.json +12 -10
- package/lib/definitions/rate-limit.js +0 -27
package/lib/add-channel.js
CHANGED

@@ -14,7 +14,7 @@ module.exports = async (pluginConfig, context) => {
   } = context;
   const {githubToken, githubUrl, githubApiPathPrefix, proxy} = resolveConfig(pluginConfig, context);
   const {owner, repo} = parseGithubUrl(repositoryUrl);
-  const
+  const octokit = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});
   let releaseId;

   const release = {owner, repo, name, prerelease: isPrerelease(branch), tag_name: gitTag};
@@ -24,14 +24,14 @@ module.exports = async (pluginConfig, context) => {
   try {
     ({
       data: {id: releaseId},
-    } = await
+    } = await octokit.request('GET /repos/{owner}/{repo}/releases/tags/{tag}', {owner, repo, tag: gitTag}));
   } catch (error) {
     if (error.status === 404) {
       logger.log('There is no release for tag %s, creating a new one', gitTag);

       const {
         data: {html_url: url},
-      } = await
+      } = await octokit.request('POST /repos/{owner}/{repo}/releases', {...release, body: notes});

       logger.log('Published GitHub release: %s', url);
       return {url, name: RELEASE_NAME};
@@ -44,7 +44,7 @@ module.exports = async (pluginConfig, context) => {

   const {
     data: {html_url: url},
-  } = await
+  } = await octokit.request('PATCH /repos/{owner}/{repo}/releases/{release_id}', {...release, release_id: releaseId});

   logger.log('Updated GitHub release: %s', url);

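For reference, a minimal sketch (not taken from the package) of the `octokit.request(route, parameters)` style that 8.0.9 switches to throughout; the owner, repo, and tag values below are placeholders.

```js
// Sketch only: the route-string request style used in 8.0.9.
// Owner/repo/tag are placeholders, not values from the package.
const {Octokit} = require('@octokit/core');

async function getReleaseIdForTag(githubToken) {
  const octokit = new Octokit({auth: `token ${githubToken}`});
  const {
    data: {id: releaseId},
  } = await octokit.request('GET /repos/{owner}/{repo}/releases/tags/{tag}', {
    owner: 'octocat',
    repo: 'hello-world',
    tag: 'v1.0.0',
  });
  return releaseId;
}
```
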
package/lib/definitions/retry.js
ADDED

@@ -0,0 +1,10 @@
+/**
+ * Default exponential backoff configuration for retries.
+ */
+const RETRY_CONF = {
+  // By default, Octokit does not retry on 404s.
+  // But we want to retry on 404s to account for replication lag.
+  doNotRetry: [400, 401, 403, 422],
+};
+
+module.exports = {RETRY_CONF};
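A sketch of how a config shaped like `RETRY_CONF` is consumed by @octokit/plugin-retry (standalone usage assumed here, not the package's own wiring): status codes listed in `doNotRetry` are never retried, while 404s are retried to absorb replication lag.

```js
// Sketch: standalone @octokit/plugin-retry usage with a RETRY_CONF-shaped config.
const {Octokit} = require('@octokit/core');
const {retry} = require('@octokit/plugin-retry');

const RetryingOctokit = Octokit.plugin(retry);
const octokit = new RetryingOctokit({
  auth: `token ${process.env.GITHUB_TOKEN}`,
  // 404 is deliberately absent so reads that hit replication lag are retried.
  retry: {doNotRetry: [400, 401, 403, 422]},
});
```
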
package/lib/fail.js
CHANGED

@@ -22,11 +22,12 @@ module.exports = async (pluginConfig, context) => {
   if (failComment === false || failTitle === false) {
     logger.log('Skip issue creation.');
   } else {
-    const
+    const octokit = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});
     // In case the repo changed name, get the new `repo`/`owner` as the search API will not follow redirects
-    const
+    const {data: repoData} = await octokit.request('GET /repos/{owner}/{repo}', parseGithubUrl(repositoryUrl));
+    const [owner, repo] = repoData.full_name.split('/');
     const body = failComment ? template(failComment)({branch, errors}) : getFailComment(branch, errors);
-    const [srIssue] = await findSRIssues(
+    const [srIssue] = await findSRIssues(octokit, failTitle, owner, repo);

     if (srIssue) {
       logger.log('Found existing semantic-release issue #%d.', srIssue.number);
@@ -34,14 +35,21 @@ module.exports = async (pluginConfig, context) => {
       debug('create comment: %O', comment);
       const {
         data: {html_url: url},
-      } = await
+      } = await octokit.request('POST /repos/{owner}/{repo}/issues/{issue_number}/comments', comment);
       logger.log('Added comment to issue #%d: %s.', srIssue.number, url);
     } else {
-      const newIssue = {
+      const newIssue = {
+        owner,
+        repo,
+        title: failTitle,
+        body: `${body}\n\n${ISSUE_ID}`,
+        labels: labels || [],
+        assignees,
+      };
       debug('create issue: %O', newIssue);
       const {
         data: {html_url: url, number},
-      } = await
+      } = await octokit.request('POST /repos/{owner}/{repo}/issues', newIssue);
       logger.log('Created issue #%d: %s.', number, url);
     }
   }
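A sketch of the rename-safe owner/repo resolution introduced above (repository values are placeholders): the canonical `full_name` from the repos API is used because the search API does not follow redirects after a repository rename.

```js
// Sketch: resolve the canonical owner/repo before searching.
const {Octokit} = require('@octokit/core');

async function resolveOwnerRepo(githubToken) {
  const octokit = new Octokit({auth: `token ${githubToken}`});
  const {data} = await octokit.request('GET /repos/{owner}/{repo}', {
    owner: 'old-owner', // placeholder
    repo: 'old-name', // placeholder
  });
  const [owner, repo] = data.full_name.split('/');
  return {owner, repo};
}
```
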
package/lib/find-sr-issues.js
CHANGED

@@ -1,9 +1,9 @@
 const {ISSUE_ID} = require('./definitions/constants');

-module.exports = async (
+module.exports = async (octokit, title, owner, repo) => {
   const {
     data: {items: issues},
-  } = await
+  } = await octokit.request('GET /search/issues', {
     q: `in:title+repo:${owner}/${repo}+type:issue+state:open+${title}`,
   });

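A sketch of the search call the helper now performs through the injected Octokit instance (token, title, and repository values are placeholders):

```js
// Sketch: the open-issue search the helper performs, with placeholder values.
const {Octokit} = require('@octokit/core');

async function findOpenIssuesByTitle(githubToken, title, owner, repo) {
  const octokit = new Octokit({auth: `token ${githubToken}`});
  const {
    data: {items},
  } = await octokit.request('GET /search/issues', {
    q: `in:title+repo:${owner}/${repo}+type:issue+state:open+${title}`,
  });
  return items;
}
```
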
package/lib/get-client.js
CHANGED

@@ -1,63 +1,24 @@
-const {memoize, get} = require('lodash');
-const {Octokit} = require('@octokit/rest');
-const pRetry = require('p-retry');
-const Bottleneck = require('bottleneck');
 const urljoin = require('url-join');
-const HttpProxyAgent = require('http-proxy-agent');
-const HttpsProxyAgent = require('https-proxy-agent');
+const {HttpProxyAgent} = require('http-proxy-agent');
+const {HttpsProxyAgent} = require('https-proxy-agent');

-const
-
-/**
- * Http error status for which to not retry.
- */
-const SKIP_RETRY_CODES = new Set([400, 401, 403]);
-
-/**
- * Create or retrieve the throttler function for a given rate limit group.
- *
- * @param {Array} rate The rate limit group.
- * @param {String} limit The rate limits per API endpoints.
- * @param {Bottleneck} globalThrottler The global throttler.
- *
- * @return {Bottleneck} The throller function for the given rate limit group.
- */
-const getThrottler = memoize((rate, globalThrottler) =>
-  new Bottleneck({minTime: get(RATE_LIMITS, rate)}).chain(globalThrottler)
-);
+const SemanticReleaseOctokit = require('./semantic-release-octokit');

 module.exports = ({githubToken, githubUrl, githubApiPathPrefix, proxy}) => {
   const baseUrl = githubUrl && urljoin(githubUrl, githubApiPathPrefix);
-  const
-  const github = new Octokit({
+  const octokit = new SemanticReleaseOctokit({
     auth: `token ${githubToken}`,
     baseUrl,
     request: {
       agent: proxy
         ? baseUrl && new URL(baseUrl).protocol.replace(':', '') === 'http'
-          ?
-
+          ? // Some `proxy.headers` need to be passed as second arguments since version 6 or 7
+            // For simplicity, we just pass the same proxy object twice. It works 🤷🏻
+            new HttpProxyAgent(proxy, proxy)
+          : new HttpsProxyAgent(proxy, proxy)
         : undefined,
     },
   });

-
-  const access = options.method === 'GET' ? 'read' : 'write';
-  const rateCategory = options.url.startsWith('/search') ? 'search' : 'core';
-  const limitKey = [rateCategory, RATE_LIMITS[rateCategory][access] && access].filter(Boolean).join('.');
-
-  return pRetry(async () => {
-    try {
-      return await getThrottler(limitKey, globalThrottler).wrap(request)(options);
-    } catch (error) {
-      if (SKIP_RETRY_CODES.has(error.status)) {
-        throw new pRetry.AbortError(error);
-      }
-
-      throw error;
-    }
-  }, RETRY_CONF);
-  });
-
-  return github;
+  return octokit;
 };
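The proxy-agent requires change from default to named exports because http-proxy-agent and https-proxy-agent v7 export the agent classes by name. A sketch of the selection logic with placeholder URLs and a string proxy (the package also accepts a proxy options object, which is why it passes `proxy` twice above):

```js
// Sketch: pick the proxy agent by the protocol of the GitHub API base URL,
// mirroring get-client.js. Both URLs are placeholders.
const {HttpProxyAgent} = require('http-proxy-agent');
const {HttpsProxyAgent} = require('https-proxy-agent');

const baseUrl = 'https://github.example.test/api/v3';
const proxy = 'http://proxy.example.test:8080';

const agent =
  new URL(baseUrl).protocol === 'http:'
    ? new HttpProxyAgent(proxy)
    : new HttpsProxyAgent(proxy);
```
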
package/lib/publish.js
CHANGED

@@ -20,7 +20,7 @@ module.exports = async (pluginConfig, context) => {
   } = context;
   const {githubToken, githubUrl, githubApiPathPrefix, proxy, assets} = resolveConfig(pluginConfig, context);
   const {owner, repo} = parseGithubUrl(repositoryUrl);
-  const
+  const octokit = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});
   const release = {
     owner,
     repo,
@@ -37,7 +37,7 @@ module.exports = async (pluginConfig, context) => {
   if (!assets || assets.length === 0) {
     const {
       data: {html_url: url, id: releaseId},
-    } = await
+    } = await octokit.request('POST /repos/{owner}/{repo}/releases', release);

     logger.log('Published GitHub release: %s', url);
     return {url, name: RELEASE_NAME, id: releaseId};
@@ -49,7 +49,7 @@ module.exports = async (pluginConfig, context) => {

   const {
     data: {upload_url: uploadUrl, id: releaseId},
-  } = await
+  } = await octokit.request('POST /repos/{owner}/{repo}/releases', draftRelease);

   // Append assets to the release
   const globbedAssets = await globAssets(context, assets);
@@ -74,6 +74,7 @@ module.exports = async (pluginConfig, context) => {

       const fileName = template(asset.name || path.basename(filePath))(context);
       const upload = {
+        method: 'POST',
         url: uploadUrl,
         data: await readFile(path.resolve(cwd, filePath)),
         name: fileName,
@@ -92,14 +93,19 @@ module.exports = async (pluginConfig, context) => {

       const {
         data: {browser_download_url: downloadUrl},
-      } = await
+      } = await octokit.request(upload);
       logger.log('Published file %s', downloadUrl);
     })
   );

   const {
     data: {html_url: url},
-  } = await
+  } = await octokit.request('PATCH /repos/{owner}/{repo}/releases/{release_id}', {
+    owner,
+    repo,
+    release_id: releaseId,
+    draft: false,
+  });

   logger.log('Published GitHub release: %s', url);
   return {url, name: RELEASE_NAME, id: releaseId};
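A sketch of the asset-upload shape used above: `octokit.request` also accepts a single options object, and the `upload_url` returned by the releases API is a URL template it can expand. The file name and content type below are placeholders.

```js
// Sketch: upload a release asset through the generic request interface.
// `uploadUrl` is the `upload_url` template returned when the release was created.
const fs = require('fs');

async function uploadAsset(octokit, uploadUrl) {
  const {
    data: {browser_download_url: downloadUrl},
  } = await octokit.request({
    method: 'POST',
    url: uploadUrl, // e.g. ".../releases/{id}/assets{?name,label}"
    name: 'example.tgz', // placeholder file name
    data: fs.readFileSync('example.tgz'),
    headers: {'content-type': 'application/gzip'},
  });
  return downloadUrl;
}
```
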
package/lib/semantic-release-octokit.js
ADDED

@@ -0,0 +1,35 @@
+/* istanbul ignore file */
+
+// If maintaining @octokit/core and the separate plugins gets to cumbersome
+// then the `octokit` package can be used which has all these plugins included.
+// However the `octokit` package has a lot of other things we don't care about.
+// We use only the bits we need to minimize the size of the package.
+const {Octokit} = require('@octokit/core');
+const {paginateRest} = require('@octokit/plugin-paginate-rest');
+const {retry} = require('@octokit/plugin-retry');
+const {throttling} = require('@octokit/plugin-throttling');
+
+const {RETRY_CONF} = require('./definitions/retry');
+const {THROTTLE_CONF} = require('./definitions/throttle');
+const {version} = require('../package.json');
+
+const onRetry = (retryAfter, options, octokit, retryCount) => {
+  octokit.log.warn(`Request quota exhausted for request ${options.method} ${options.url}`);
+
+  if (retryCount <= RETRY_CONF.retries) {
+    octokit.log.debug(`Will retry after ${retryAfter}.`);
+    return true;
+  }
+};
+
+const SemanticReleaseOctokit = Octokit.plugin(paginateRest, retry, throttling).defaults({
+  userAgent: `@semantic-release/github v${version}`,
+  retry: RETRY_CONF,
+  throttle: {
+    ...THROTTLE_CONF,
+    onRateLimit: onRetry,
+    onSecondaryRateLimit: onRetry,
+  },
+});
+
+module.exports = SemanticReleaseOctokit;
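A sketch of building and using a preconfigured constructor the same way, via `Octokit.plugin(...).defaults(...)` (the user agent and auth values are placeholders): every instance then gets pagination, retries, and throttling without further setup.

```js
// Sketch: a preconfigured Octokit constructor assembled from the same plugins.
const {Octokit} = require('@octokit/core');
const {paginateRest} = require('@octokit/plugin-paginate-rest');
const {retry} = require('@octokit/plugin-retry');
const {throttling} = require('@octokit/plugin-throttling');

const MyOctokit = Octokit.plugin(paginateRest, retry, throttling).defaults({
  userAgent: 'my-tool v1.0.0', // placeholder
  throttle: {
    onRateLimit: () => true, // retry when a primary rate limit is hit
    onSecondaryRateLimit: () => true, // retry when a secondary rate limit is hit
  },
});

const octokit = new MyOctokit({auth: `token ${process.env.GITHUB_TOKEN}`});
```
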
package/lib/success.js
CHANGED

@@ -32,9 +32,10 @@ module.exports = async (pluginConfig, context) => {
     addReleases,
   } = resolveConfig(pluginConfig, context);

-  const
+  const octokit = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});
   // In case the repo changed name, get the new `repo`/`owner` as the search API will not follow redirects
-  const
+  const {data: repoData} = await octokit.request('GET /repos/{owner}/{repo}', parseGithubUrl(repositoryUrl));
+  const [owner, repo] = repoData.full_name.split('/');

   const errors = [];

@@ -46,15 +47,27 @@ module.exports = async (pluginConfig, context) => {
     const shas = commits.map(({hash}) => hash);

     const searchQueries = getSearchQueries(`repo:${owner}/${repo}+type:pr+is:merged`, shas).map(
-      async (q) => (await
+      async (q) => (await octokit.request('GET /search/issues', {q})).data.items
     );

-    const
-
-
-
-
-
+    const searchQueriesResults = await Promise.all(searchQueries);
+    const uniqueSearchQueriesResults = uniqBy(flatten(searchQueriesResults), 'number');
+    const prs = await pFilter(uniqueSearchQueriesResults, async ({number}) => {
+      const commits = await octokit.paginate('GET /repos/{owner}/{repo}/pulls/{pull_number}/commits', {
+        owner,
+        repo,
+        pull_number: number,
+      });
+      const matchingCommit = commits.find(({sha}) => shas.includes(sha));
+      if (matchingCommit) return matchingCommit;
+
+      const {data: pullRequest} = await octokit.request('GET /repos/{owner}/{repo}/pulls/{pull_number}', {
+        owner,
+        repo,
+        pull_number: number,
+      });
+      return shas.includes(pullRequest.merge_commit_sha);
+    });

     debug(
       'found pull requests: %O',
@@ -87,17 +100,15 @@ module.exports = async (pluginConfig, context) => {
           debug('create comment: %O', comment);
           const {
             data: {html_url: url},
-          } = await
+          } = await octokit.request('POST /repos/{owner}/{repo}/issues/{issue_number}/comments', comment);
           logger.log('Added comment to issue #%d: %s', issue.number, url);

           if (releasedLabels) {
             const labels = releasedLabels.map((label) => template(label)(context));
-
-            // https://github.com/semantic-release/github/issues/138
-            await github.request('POST /repos/:owner/:repo/issues/:number/labels', {
+            await octokit.request('POST /repos/{owner}/{repo}/issues/{issue_number}/labels', {
              owner,
              repo,
-
+             issue_number: issue.number,
              data: labels,
            });
            logger.log('Added labels %O to issue #%d', labels, issue.number);
@@ -120,7 +131,7 @@ module.exports = async (pluginConfig, context) => {
     if (failComment === false || failTitle === false) {
       logger.log('Skip closing issue.');
     } else {
-      const srIssues = await findSRIssues(
+      const srIssues = await findSRIssues(octokit, failTitle, owner, repo);

       debug('found semantic-release issues: %O', srIssues);

@@ -132,7 +143,7 @@ module.exports = async (pluginConfig, context) => {
       debug('closing issue: %O', updateIssue);
       const {
         data: {html_url: url},
-      } = await
+      } = await octokit.request('PATCH /repos/{owner}/{repo}/issues/{issue_number}', updateIssue);
       logger.log('Closed issue #%d: %s.', issue.number, url);
     } catch (error) {
       errors.push(error);
@@ -153,7 +164,12 @@ module.exports = async (pluginConfig, context) => {
       addReleases === 'top'
         ? additionalReleases.concat('\n---\n', nextRelease.notes)
         : nextRelease.notes.concat('\n---\n', additionalReleases);
-    await
+    await octokit.request('PATCH /repos/{owner}/{repo}/releases/{release_id}', {
+      owner,
+      repo,
+      release_id: ghRelaseId,
+      body: newBody,
+    });
   }
  }
 }
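A sketch of the `octokit.paginate` helper (from @octokit/plugin-paginate-rest) that the new PR-filtering logic relies on to fetch every commit of a pull request; the repository values are placeholders.

```js
// Sketch: collect all commit SHAs of a pull request across pages.
const {Octokit} = require('@octokit/core');
const {paginateRest} = require('@octokit/plugin-paginate-rest');

const PaginatingOctokit = Octokit.plugin(paginateRest);

async function listPullCommitShas(githubToken, pullNumber) {
  const octokit = new PaginatingOctokit({auth: `token ${githubToken}`});
  const commits = await octokit.paginate('GET /repos/{owner}/{repo}/pulls/{pull_number}/commits', {
    owner: 'octocat', // placeholder
    repo: 'hello-world', // placeholder
    pull_number: pullNumber,
    per_page: 100,
  });
  return commits.map(({sha}) => sha);
}
```
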
package/lib/verify.js
CHANGED

@@ -55,12 +55,12 @@ module.exports = async (pluginConfig, context) => {
   if (!owner || !repo) {
     errors.push(getError('EINVALIDGITHUBURL'));
   } else if (githubToken && !errors.find(({code}) => code === 'EINVALIDPROXY')) {
-    const
+    const octokit = getClient({githubToken, githubUrl, githubApiPathPrefix, proxy});

     // https://github.com/semantic-release/github/issues/182
     // Do not check for permissions in GitHub actions, as the provided token is an installation access token.
-    //
-    // have all permissions required for @semantic-release/github to work
+    // octokit.request("GET /repos/{owner}/{repo}", {repo, owner}) does not return the "permissions" key in that case.
+    // But GitHub Actions have all permissions required for @semantic-release/github to work
     if (env.GITHUB_ACTION) {
       return;
     }
@@ -70,13 +70,13 @@ module.exports = async (pluginConfig, context) => {
       data: {
         permissions: {push},
       },
-    } = await
+    } = await octokit.request('GET /repos/{owner}/{repo}', {repo, owner});
     if (!push) {
       // If authenticated as GitHub App installation, `push` will always be false.
       // We send another request to check if current authentication is an installation.
       // Note: we cannot check if the installation has all required permissions, it's
       // up to the user to make sure it has
-      if (await
+      if (await octokit.request('HEAD /installation/repositories', {per_page: 1}).catch(() => false)) {
        return;
       }

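A sketch of the push-permission probe performed above (repository values are placeholders): outside GitHub Actions, the plugin requires `permissions.push` on the repository unless the token turns out to be an App installation token.

```js
// Sketch: check whether the token has push permission on the repository.
const {Octokit} = require('@octokit/core');

async function hasPushPermission(githubToken) {
  const octokit = new Octokit({auth: `token ${githubToken}`});
  const {
    data: {
      permissions: {push},
    },
  } = await octokit.request('GET /repos/{owner}/{repo}', {
    owner: 'octocat', // placeholder
    repo: 'hello-world', // placeholder
  });
  return Boolean(push);
}
```
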
package/package.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "@semantic-release/github",
   "description": "semantic-release plugin to publish a GitHub release and comment on released Pull Requests/Issues",
-  "version": "8.0.7",
+  "version": "8.0.9",
   "author": "Pierre Vanduynslager (https://twitter.com/@pvdlg_)",
   "ava": {
     "files": [
@@ -16,34 +16,35 @@
     "Gregor Martynus (https://twitter.com/gr2m)"
   ],
   "dependencies": {
-    "@octokit/
+    "@octokit/core": "^4.2.1",
+    "@octokit/plugin-paginate-rest": "^6.1.2",
+    "@octokit/plugin-retry": "^4.1.3",
+    "@octokit/plugin-throttling": "^5.2.3",
     "@semantic-release/error": "^3.0.0",
     "aggregate-error": "^3.0.0",
-    "bottleneck": "^2.18.1",
     "debug": "^4.0.0",
     "dir-glob": "^3.0.0",
     "fs-extra": "^11.0.0",
     "globby": "^11.0.0",
-    "http-proxy-agent": "^
-    "https-proxy-agent": "^
+    "http-proxy-agent": "^7.0.0",
+    "https-proxy-agent": "^7.0.0",
     "issue-parser": "^6.0.0",
     "lodash": "^4.17.4",
     "mime": "^3.0.0",
     "p-filter": "^2.0.0",
-    "p-retry": "^4.0.0",
     "url-join": "^4.0.0"
   },
   "devDependencies": {
     "ava": "5.1.0",
     "clear-module": "4.1.2",
     "codecov": "3.8.3",
-    "nock": "13.
+    "nock": "13.3.1",
     "nyc": "15.1.0",
     "proxy": "1.0.2",
     "proxyquire": "2.1.3",
-    "semantic-release": "
+    "semantic-release": "21.0.2",
     "server-destroy": "1.0.1",
-    "sinon": "15.
+    "sinon": "15.1.0",
     "tempy": "1.0.1",
     "xo": "0.36.1"
   },
@@ -88,7 +89,8 @@
     "trailingComma": "es5"
   },
   "publishConfig": {
-    "access": "public"
+    "access": "public",
+    "provenance": true
   },
   "repository": {
     "type": "git",
package/lib/definitions/rate-limit.js
DELETED

@@ -1,27 +0,0 @@
-/**
- * Default exponential backoff configuration for retries.
- */
-const RETRY_CONF = {retries: 3, factor: 2, minTimeout: 1000};
-
-/**
- * Rate limit per API endpoints.
- *
- * See {@link https://developer.github.com/v3/search/#rate-limit|Search API rate limit}.
- * See {@link https://developer.github.com/v3/#rate-limiting|Rate limiting}.
- */
-const RATE_LIMITS = {
-  search: ((60 * 1000) / 30) * 1.1, // 30 calls per minutes => 1 call every 2s + 10% safety margin
-  core: {
-    read: ((60 * 60 * 1000) / 5000) * 1.1, // 5000 calls per hour => 1 call per 720ms + 10% safety margin
-    write: 3000, // 1 call every 3 seconds
-  },
-};
-
-/**
- * Global rate limit to prevent abuse.
- *
- * See {@link https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits|Dealing with abuse rate limits}
- */
-const GLOBAL_RATE_LIMIT = 1000;
-
-module.exports = {RETRY_CONF, RATE_LIMITS, GLOBAL_RATE_LIMIT};
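For context on what replaces this removed file: the manual Bottleneck/p-retry limits are now covered by @octokit/plugin-throttling, whose hooks decide whether to retry when GitHub reports a primary or secondary rate limit. A standalone sketch with placeholder values:

```js
// Sketch: @octokit/plugin-throttling takes over the rate limiting that
// RATE_LIMITS/GLOBAL_RATE_LIMIT used to implement by hand.
const {Octokit} = require('@octokit/core');
const {throttling} = require('@octokit/plugin-throttling');

const ThrottledOctokit = Octokit.plugin(throttling);
const octokit = new ThrottledOctokit({
  auth: `token ${process.env.GITHUB_TOKEN}`,
  throttle: {
    // Return true to retry; the plugin waits `retryAfter` seconds first.
    onRateLimit: (retryAfter, options, client, retryCount) => retryCount < 3,
    onSecondaryRateLimit: () => true,
  },
});
```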