feat: add global throttler to comply with GitHub recommendation
pvdlg authored and gr2m committed Feb 18, 2018
1 parent c0773b6 commit 9ea2730
Showing 13 changed files with 272 additions and 195 deletions.
38 changes: 31 additions & 7 deletions lib/get-client.js
@@ -20,6 +20,13 @@ const RATE_LIMITS = {
core: [5000, 60 * 60 * 1000],
};

/**
* Global rate limit to prevent abuse.
*
* See {@link https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits|Dealing with abuse rate limits}
*/
const GLOBAL_RATE_LIMIT = [1, 1000];

/**
* Http error codes for which to not retry.
*/
@@ -32,14 +39,22 @@ const SKIP_RETRY_CODES = [400, 401, 403];
*/

/**
* Create or retrieve the throller function for a given rate limit group.
* Create or retrieve the throttler function for a given rate limit group.
*
* @param {String} rate The rate limit group (e.g. 'core').
* @param {Object} limit The rate limits per API endpoints.
* @type {Throttler} The throller function for the given rate limit group.
* @return {Throttler} The throttler function for the given rate limit group.
*/
const getThrottler = memoize((rate, limit) => pThrottle((func, ...args) => func(...args), ...limit[rate]));

/**
* Create the global throttler function to comply with GitHub abuse prevention recommendations.
*
* @param {Array} globalLimit The global rate limit.
* @return {Throttler} The throttler function for the global rate limit.
*/
const getGlobalThrottler = globalLimit => pThrottle((func, ...args) => func(...args), ...globalLimit);
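For context, p-throttle v1 takes (fn, limit, interval) and returns a promise-returning wrapper that runs fn at most `limit` times per `interval` milliseconds. `getThrottler` is additionally memoized (presumably lodash's memoize, which caches by its first argument), so every endpoint in the same group shares one throttler. A minimal sketch of the mechanism; the `request` function is a hypothetical stand-in for an API call:

const pThrottle = require('p-throttle');

// Same shape as GLOBAL_RATE_LIMIT = [1, 1000]: at most one call per second.
const throttled = pThrottle((func, ...args) => func(...args), 1, 1000);

const request = n => Promise.resolve(`response ${n}`);

throttled(request, 1).then(console.log); // runs immediately
throttled(request, 2).then(console.log); // runs ~1 second later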

/**
* Create a `handler` for a `Proxy` wrapping an Octokit instance to:
* - Recursively wrap the child objects of the Octokit instance in a `Proxy`
@@ -48,9 +63,10 @@ const getThrottler = memoize((rate, limit) => pThrottle((func, ...args) => func(
* @param {Object} retry The configuration to pass to `p-retry`.
* @param {Array} limit The rate limits per API endpoints.
* @param {String} endpoint The API endpoint to handle.
* @param {Throttler} globalThrottler The throttler function for the global rate limit.
* @return {Function} The `handler` for a `Proxy` wrapping an Octokit instance.
*/
const handler = (retry, limit, endpoint) => ({
const handler = (retry, limit, globalThrottler, endpoint) => ({
/**
* If the target has the property as its own, determine the rate limit based on the property name and recursively wrap the value in a `Proxy`. Otherwise, return the property value.
*
@@ -61,7 +77,7 @@ const handler = (retry, limit, endpoint) => ({
*/
get: (target, name, receiver) =>
Object.prototype.hasOwnProperty.call(target, name)
? new Proxy(target[name], handler(retry, limit, endpoint || name))
? new Proxy(target[name], handler(retry, limit, globalThrottler, endpoint || name))
: Reflect.get(target, name, receiver),

/**
@@ -74,9 +90,10 @@ const handler = (retry, limit, endpoint) => ({
*/
apply: (func, that, args) => {
const throttler = getThrottler(limit[endpoint] ? endpoint : 'core', limit);

return pRetry(async () => {
try {
return await throttler(func, ...args);
return await globalThrottler((func, ...args) => throttler(func, ...args), func, ...args);
} catch (err) {
if (SKIP_RETRY_CODES.includes(err.code)) {
throw new pRetry.AbortError(err);
@@ -87,7 +104,14 @@
},
});
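The two traps combine as follows: `get` carries the endpoint name down the property chain, and `apply` funnels the eventual call through the global throttler, the per-group throttler, and p-retry. A stripped-down, runnable sketch of the same recursive-Proxy pattern; the `api` object is a hypothetical stand-in for the Octokit instance, and retries are omitted:

const pThrottle = require('p-throttle');

// One call per second, mirroring GLOBAL_RATE_LIMIT.
const throttle = pThrottle((fn, ...args) => fn(...args), 1, 1000);

// Recursively wrap an object so every nested method is throttled.
const wrap = target =>
  new Proxy(target, {
    get: (obj, name) =>
      Object.prototype.hasOwnProperty.call(obj, name) ? wrap(obj[name]) : Reflect.get(obj, name),
    apply: (fn, that, args) => throttle(fn, ...args),
  });

const api = wrap({search: {issues: async q => `results for ${q}`}});
api.search.issues('repo:x/y+type:pr').then(console.log); // throttled to 1/s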

module.exports = ({githubToken, githubUrl, githubApiPathPrefix, retry = DEFAULT_RETRY, limit = RATE_LIMITS}) => {
module.exports = ({
githubToken,
githubUrl,
githubApiPathPrefix,
retry = DEFAULT_RETRY,
limit = RATE_LIMITS,
globalLimit = GLOBAL_RATE_LIMIT,
}) => {
const {port, protocol, hostname} = githubUrl ? url.parse(githubUrl) : {};
const github = new Octokit({
port,
@@ -96,5 +120,5 @@ module.exports = ({githubToken, githubUrl, githubApiPathPrefix, retry = DEFAULT_
pathPrefix: githubApiPathPrefix,
});
github.authenticate({type: 'token', token: githubToken});
return new Proxy(github, handler(retry, limit));
return new Proxy(github, handler(retry, limit, getGlobalThrottler(globalLimit)));
};
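Callers use the returned Proxy exactly like a plain Octokit instance; throttling and retrying happen transparently. A hedged usage sketch (the token, repo, and `getReleases` endpoint are illustrative, not taken from this commit):

const getClient = require('./lib/get-client');

const github = getClient({
  githubToken: process.env.GITHUB_TOKEN,
  // The remaining options are optional and default to the constants above:
  retry: {retries: 3},
  globalLimit: [1, 1000], // at most one request per second
});

github.repos
  .getReleases({owner: 'octocat', repo: 'hello-world'})
  .then(({data}) => console.log(data.length));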
79 changes: 40 additions & 39 deletions lib/glob-assets.js
@@ -1,49 +1,50 @@
const {basename} = require('path');
const {isPlainObject, castArray, uniqWith} = require('lodash');
const pReduce = require('p-reduce');
const globby = require('globby');
const debug = require('debug')('semantic-release:github');

module.exports = async assets =>
uniqWith(
(await pReduce(
assets,
async (result, asset) => {
// Wrap single glob definition in Array
const glob = castArray(isPlainObject(asset) ? asset.path : asset);
// Skip solo negated pattern (avoid including every non-JS file with `!**/*.js`)
if (glob.length <= 1 && glob[0].startsWith('!')) {
debug(
'skipping the negated glob %o as it is alone in its group and would retrieve a large number of files',
glob[0]
);
return result;
}
const globbed = await globby(glob, {expandDirectories: true, gitignore: false, dot: true});
if (isPlainObject(asset)) {
if (globbed.length > 1) {
// If asset is an Object with a glob in the `path` property that resolves to multiple files,
// Output an Object definition for each file matched and set each one with:
// - `path` of the matched file
// - `name` based on the actual file name (to avoid assets with duplicate `name`)
// - other properties of the original asset definition
return [...result, ...globbed.map(file => Object.assign({}, asset, {path: file, name: basename(file)}))];
}
// If asset is an Object, output an Object definition with:
// - `path` of the matched file if there is one, or the original `path` definition (will be considered as a missing file)
// - other properties of the original asset definition
return [...result, Object.assign({}, asset, {path: globbed[0] || asset.path})];
}
if (globbed.length > 0) {
// If asset is a String definition, output each file matched
return [...result, ...globbed];
}
// If asset is a String definition but no match is found, output the elements of the original glob (each one will be considered as a missing file)
return [...result, ...glob];
},
[]
// Sort with Object first, to prioritize Object definition over Strings in dedup
)).sort(asset => !isPlainObject(asset)),
[]
.concat(
...(await Promise.all(
assets.map(async asset => {
// Wrap single glob definition in Array
const glob = castArray(isPlainObject(asset) ? asset.path : asset);
// Skip solo negated pattern (avoid including every non-JS file with `!**/*.js`)
if (glob.length <= 1 && glob[0].startsWith('!')) {
debug(
'skipping the negated glob %o as it is alone in its group and would retrieve a large number of files',
glob[0]
);
return [];
}
const globbed = await globby(glob, {expandDirectories: true, gitignore: false, dot: true});
if (isPlainObject(asset)) {
if (globbed.length > 1) {
// If asset is an Object with a glob in the `path` property that resolves to multiple files,
// Output an Object definition for each file matched and set each one with:
// - `path` of the matched file
// - `name` based on the actual file name (to avoid assets with duplicate `name`)
// - other properties of the original asset definition
return globbed.map(file => Object.assign({}, asset, {path: file, name: basename(file)}));
}
// If asset is an Object, output an Object definition with:
// - `path` of the matched file if there is one, or the original `path` definition (will be considered as a missing file)
// - other properties of the original asset definition
return Object.assign({}, asset, {path: globbed[0] || asset.path});
}
if (globbed.length > 0) {
// If asset is a String definition, output each file matched
return globbed;
}
// If asset is a String definition but no match is found, output the elements of the original glob (each one will be considered as a missing file)
return glob;
})
// Sort with Object first, to prioritize Object definition over Strings in dedup
))
)
.sort(asset => !isPlainObject(asset)),
// Compare `path` property if Object definition, value itself if String
(a, b) => (isPlainObject(a) ? a.path : a) === (isPlainObject(b) ? b.path : b)
);
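The refactor replaces the serial p-reduce accumulator with a parallel Promise.all whose per-asset results (a single definition or an array of them) are flattened one level with `[].concat(...)`; Array#flat was not yet available in the Node versions of the time. The idiom in isolation, with made-up sample values:

// Each mapped asset may produce one definition or an array of them.
const flatten = results => [].concat(...results);

Promise.all([
  Promise.resolve([{path: 'a.js'}, {path: 'b.js'}]), // glob matched two files
  Promise.resolve('c.txt'), // plain String asset
]).then(results => console.log(flatten(results)));
// -> [{path: 'a.js'}, {path: 'b.js'}, 'c.txt']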
67 changes: 34 additions & 33 deletions lib/publish.js
@@ -2,7 +2,6 @@ const {basename, extname} = require('path');
const {stat, readFile} = require('fs-extra');
const {isPlainObject} = require('lodash');
const parseGithubUrl = require('parse-github-url');
const pReduce = require('p-reduce');
const mime = require('mime');
const debug = require('debug')('semantic-release:github');
const globAssets = require('./glob-assets.js');
@@ -26,43 +25,45 @@ module.exports = async (pluginConfig, {options: {branch, repositoryUrl}, nextRel
if (assets && assets.length > 0) {
const globbedAssets = await globAssets(assets);
debug('globbed assets: %o', globbedAssets);
// Make requests serially to avoid hitting the rate limit (https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits)
await pReduce(globbedAssets, async (_, asset) => {
const filePath = isPlainObject(asset) ? asset.path : asset;
let file;

try {
file = await stat(filePath);
} catch (err) {
logger.error('The asset %s cannot be read, and will be ignored.', filePath);
return;
}
if (!file || !file.isFile()) {
logger.error('The asset %s is not a file, and will be ignored.', filePath);
return;
}
await Promise.all(
globbedAssets.map(async asset => {
const filePath = isPlainObject(asset) ? asset.path : asset;
let file;

const fileName = asset.name || basename(filePath);
const upload = {
owner,
repo,
url: uploadUrl,
file: await readFile(filePath),
contentType: mime.getType(extname(fileName)) || 'text/plain',
contentLength: file.size,
name: fileName,
};
try {
file = await stat(filePath);
} catch (err) {
logger.error('The asset %s cannot be read, and will be ignored.', filePath);
return;
}
if (!file || !file.isFile()) {
logger.error('The asset %s is not a file, and will be ignored.', filePath);
return;
}

debug('file path: %o', filePath);
debug('file name: %o', fileName);
const fileName = asset.name || basename(filePath);
const upload = {
owner,
repo,
url: uploadUrl,
file: await readFile(filePath),
contentType: mime.getType(extname(fileName)) || 'text/plain',
contentLength: file.size,
name: fileName,
};

if (isPlainObject(asset) && asset.label) {
upload.label = asset.label;
}
debug('file path: %o', filePath);
debug('file name: %o', fileName);

const {data: {browser_download_url: downloadUrl}} = await github.repos.uploadAsset(upload);
logger.log('Published file %s', downloadUrl);
});
if (isPlainObject(asset) && asset.label) {
upload.label = asset.label;
}

const {data: {browser_download_url: downloadUrl}} = await github.repos.uploadAsset(upload);
logger.log('Published file %s', downloadUrl);
})
);
}

return {url, name: 'GitHub release'};
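Dropping p-reduce here is safe because throttling moved into the client: uploads now start in parallel, and the proxied `github` spaces the underlying requests out via the global throttler. Schematically, with `upload` as a hypothetical stand-in for the stat/readFile/uploadAsset body above:

const publishAssets = (globbedAssets, upload) =>
  // Before this commit: await pReduce(globbedAssets, async (_, asset) => upload(asset));
  // to serialize requests in the plugin itself. Now the client throttles,
  // so the plugin can start everything at once:
  Promise.all(globbedAssets.map(asset => upload(asset)));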
76 changes: 39 additions & 37 deletions lib/success.js
@@ -1,6 +1,5 @@
const {uniqBy, template} = require('lodash');
const parseGithubUrl = require('parse-github-url');
const pReduce = require('p-reduce');
const AggregateError = require('aggregate-error');
const issueParser = require('issue-parser')('github');
const debug = require('debug')('semantic-release:github');
@@ -19,13 +18,13 @@ module.exports = async (
const github = getClient({githubToken, githubUrl, githubApiPathPrefix});
const releaseInfos = releases.filter(release => Boolean(release.name));

const prs = await pReduce(
getSearchQueries(`repo:${owner}/${repo}+type:pr`, commits.map(commit => commit.hash)),
async (prs, q) => {
const {data: {items}} = await github.search.issues({q});
return [...prs, ...items];
},
[]
const prs = [].concat(
...(await Promise.all(
getSearchQueries(`repo:${owner}/${repo}+type:pr`, commits.map(commit => commit.hash)).map(async q => {
const {data: {items}} = await github.search.issues({q});
return items;
})
))
);

debug('found pull requests: %O', prs.map(pr => pr.number));
@@ -46,40 +45,43 @@

const errors = [];

// Make requests serially to avoid hitting the rate limit (https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits)
await pReduce([...prs, ...issues], async (_, issue) => {
const body = successComment
? template(successComment)({branch, lastRelease, commits, nextRelease, releases, issue})
: getSuccessComment(issue, releaseInfos, nextRelease);
try {
const comment = {owner, repo, number: issue.number, body};
debug('create comment: %O', comment);
const {data: {html_url: url}} = await github.issues.createComment(comment);
logger.log('Added comment to issue #%d: %s', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to add a comment to the issue #%d.', issue.number);
// Don't throw right away and continue to update other issues
}
});
await Promise.all(
[...prs, ...issues].map(async issue => {
const body = successComment
? template(successComment)({branch, lastRelease, commits, nextRelease, releases, issue})
: getSuccessComment(issue, releaseInfos, nextRelease);
try {
const comment = {owner, repo, number: issue.number, body};
debug('create comment: %O', comment);
const {data: {html_url: url}} = await github.issues.createComment(comment);
logger.log('Added comment to issue #%d: %s', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to add a comment to the issue #%d.', issue.number);
// Don't throw right away and continue to update other issues
}
})
);

const srIssues = await findSRIssues(github, failTitle, owner, repo);

debug('found semantic-release issues: %O', srIssues);

await pReduce(srIssues, async (_, issue) => {
debug('close issue: %O', issue);
try {
const updateIssue = {owner, repo, number: issue.number, state: 'closed'};
debug('closing issue: %O', updateIssue);
const {data: {html_url: url}} = await github.issues.edit(updateIssue);
logger.log('Closed issue #%d: %s.', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to close the issue #%d.', issue.number);
// Don't throw right away and continue to close other issues
}
});
await Promise.all(
srIssues.map(async issue => {
debug('close issue: %O', issue);
try {
const updateIssue = {owner, repo, number: issue.number, state: 'closed'};
debug('closing issue: %O', updateIssue);
const {data: {html_url: url}} = await github.issues.edit(updateIssue);
logger.log('Closed issue #%d: %s.', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to close the issue #%d.', issue.number);
// Don't throw right away and continue to close other issues
}
})
);

if (errors.length > 0) {
throw new AggregateError(errors);
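The comment and issue updates use a collect-then-throw pattern: each failure is pushed to `errors` so one bad issue does not stop the others, and aggregate-error turns the list into a single throwable at the end. The pattern in isolation; the `tasks` are hypothetical:

const AggregateError = require('aggregate-error');

const runAll = async tasks => {
  const errors = [];
  await Promise.all(
    tasks.map(async task => {
      try {
        await task();
      } catch (err) {
        errors.push(err); // keep going; report every failure at the end
      }
    })
  );
  if (errors.length > 0) {
    throw new AggregateError(errors);
  }
};

// One rejection does not cancel the other task; both run, then we throw once.
runAll([async () => 'ok', async () => { throw new Error('boom'); }])
  .catch(err => console.log(err.message)); // includes 'boom'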
1 change: 0 additions & 1 deletion package.json
@@ -25,7 +25,6 @@
"issue-parser": "^1.0.1",
"lodash": "^4.17.4",
"mime": "^2.0.3",
"p-reduce": "^1.0.0",
"p-retry": "^1.0.0",
"p-throttle": "^1.1.0",
"parse-github-url": "^1.0.1",
