
Commit bfc7e0f

ncu-ci: support parsing CI links from PR thread
1 parent 713a9e9 commit bfc7e0f

6 files changed: +158 -42 lines changed


bin/ncu-ci

Lines changed: 27 additions & 9 deletions
@@ -3,7 +3,9 @@
 'use strict';
 
 const {
-  PRBuild, BenchmarkRun, CommitBuild, jobCache, parseJobFromURL, constants
+  PRBuild, BenchmarkRun, CommitBuild, parseJobFromURL,
+  constants, JobParser
+  // , jobCache
 } = require('../lib/ci');
 const clipboardy = require('clipboardy');
 
@@ -12,13 +14,14 @@ const {
 } = constants;
 
 const { runPromise } = require('../lib/run');
+const auth = require('../lib/auth');
 const Request = require('../lib/request');
 const CLI = require('../lib/cli');
 const yargs = require('yargs');
 
 // This is used for testing
 // Default cache dir is ${ncu-source-dir}/.ncu/cache
-jobCache.enable();
+// jobCache.enable();
 
 // eslint-disable-next-line no-unused-vars
 const argv = yargs
@@ -99,7 +102,8 @@ async function getResults(cli, request, job) {
 
 async function main(command, argv) {
   const cli = new CLI();
-  const request = new Request({});
+  const credentials = await auth();
+  const request = new Request(credentials);
   const queue = [];
 
   const commandToType = {
@@ -114,31 +118,45 @@ async function main(command, argv) {
       queue.push({
         type: parsed.type,
         jobid: parsed.jobid,
-        markdown: argv.markdown
+        copy: argv.copy
      });
     } else {
-      // TODO: parse CI links from PR thread
-      return yargs.showHelp();
+      const parser = await JobParser.fromPR(argv.url, cli, request);
+      if (!parser) { // Not a valid PR URL
+        return yargs.showHelp();
+      }
+      const ciMap = parser.parse();
+      for (const [type, ci] of ciMap) {
+        queue.push({
+          type: type,
+          jobid: ci.jobid,
+          copy: argv.copy
+        });
+      }
     }
   } else {
     queue.push({
       type: commandToType[command],
       jobid: argv.jobid,
-      markdown: argv.markdown
+      copy: argv.copy
     });
   }
 
+  let dataToCopy = '';
+
   for (let job of queue) {
     const build = await getResults(cli, request, job);
     build.display();
 
     if (argv.copy) {
-      clipboardy.writeSync(build.formatAsMarkdown());
+      dataToCopy += build.formatAsMarkdown();
     }
+  }
 
+  if (argv.copy) {
+    clipboardy.writeSync(dataToCopy);
     cli.separator('');
     cli.log(`Written markdown to clipboard`);
-    return build;
   }
 }
 

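Pieced together from the hunks above, the new behaviour of main() amounts to the sketch below. This is a readability reconstruction of the diff, not additional code in the commit; authentication setup and the direct-job-URL branch are elided.

// If the argument is not a direct Jenkins job URL, try it as a PR URL and
// queue every CI job that JobParser finds in the PR thread.
const parser = await JobParser.fromPR(argv.url, cli, request);
if (!parser) {
  return yargs.showHelp(); // not a valid PR URL either
}
for (const [type, ci] of parser.parse()) {
  queue.push({ type, jobid: ci.jobid, copy: argv.copy });
}

// With --copy, the markdown for all queued builds is now accumulated and
// written to the clipboard once, instead of being overwritten per build.
let dataToCopy = '';
for (const job of queue) {
  const build = await getResults(cli, request, job);
  build.display();
  if (argv.copy) dataToCopy += build.formatAsMarkdown();
}
if (argv.copy) clipboardy.writeSync(dataToCopy);
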
lib/ci.js

Lines changed: 15 additions & 2 deletions
@@ -1,7 +1,8 @@
 'use strict';
 
 const Cache = require('./cache');
-
+const PRData = require('./pr_data');
+const { parsePRFromURL } = require('./links');
 const qs = require('querystring');
 const chalk = require('chalk');
 
@@ -48,7 +49,7 @@ function parseJobFromURL(url) {
   }
 
   for (let [ type, info ] of CI_TYPES) {
-    const re = new RegExp(`${CI_DOMAIN}/job/${info.jobName}/(\\d+)`);
+    const re = new RegExp(`job/${info.jobName}/(\\d+)`);
     const match = url.match(re);
     if (match) {
       return {
@@ -108,6 +109,7 @@ class JobParser {
     }
 
     const result = [];
+    console.log(links);
     for (const link of links) {
       const parsed = parseJobFromURL(`https:${link}`);
       if (parsed) {
@@ -119,6 +121,17 @@ class JobParser {
   }
 }
 
+JobParser.fromPR = async function(url, cli, request) {
+  const argv = parsePRFromURL(url);
+  if (!argv) {
+    return undefined;
+  }
+  const data = new PRData(argv, cli, request);
+  await data.getThreadData();
+  const thread = data.getThread();
+  return new JobParser(thread);
+};
+
 const SUCCESS = 'SUCCESS';
 const FAILURE = 'FAILURE';
 const ABORTED = 'ABORTED';

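The new JobParser.fromPR factory can also be exercised on its own. A minimal usage sketch, assuming cli and request are the CLI and authenticated Request instances the caller already holds (the PR URL is hypothetical):

// fromPR() resolves to undefined if the URL is not recognised as a PR URL,
// otherwise to a JobParser over the PR body, comments and reviews.
const parser = await JobParser.fromPR(
  'https://github.com/nodejs/node/pull/12345', cli, request); // hypothetical URL
if (parser) {
  // parse() yields a Map keyed by CI type; each entry carries at least the
  // job id that bin/ncu-ci needs in order to fetch results.
  for (const [type, job] of parser.parse()) {
    console.log(type, job.jobid);
  }
}
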
lib/pr_checker.js

Lines changed: 2 additions & 6 deletions
@@ -194,13 +194,9 @@ class PRChecker {
   // TODO: we might want to check CI status when it's less flaky...
   // TODO: not all PR requires CI...labels?
   checkCI() {
-    const { pr, cli, comments, reviews, commits, argv } = this;
-    const prNode = {
-      publishedAt: pr.createdAt,
-      bodyText: pr.bodyText
-    };
+    const { cli, commits, argv } = this;
     const { maxCommits } = argv;
-    const thread = comments.concat([prNode]).concat(reviews);
+    const thread = this.data.getThread();
     const ciMap = new JobParser(thread).parse();
     let status = true;
     if (!ciMap.size) {

lib/pr_data.js

Lines changed: 18 additions & 3 deletions
@@ -38,14 +38,29 @@ class PRData {
     this.reviewers = [];
   }
 
+  getThread() {
+    const { pr, comments, reviews } = this;
+    const prNode = {
+      publishedAt: pr.createdAt,
+      bodyText: pr.bodyText
+    };
+    return comments.concat([prNode]).concat(reviews);
+  }
+
+  async getThreadData() {
+    return Promise.all([
+      this.getPR(),
+      this.getReviews(),
+      this.getComments()
+    ]);
+  }
+
   async getAll(argv) {
     const { prStr } = this;
     this.cli.startSpinner(`Loading data for ${prStr}`);
     await Promise.all([
       this.getCollaborators(),
-      this.getPR(),
-      this.getReviews(),
-      this.getComments(),
+      this.getThreadData(),
       this.getCommits()
     ]).then(() => {
       this.cli.stopSpinner(`Done loading data for ${prStr}`);

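For reference, the thread that getThread() returns is a flat array: the comment nodes, one synthetic node standing in for the PR body, and the review nodes, in that order. The synthetic node only carries publishedAt and bodyText, so those are presumably the fields JobParser reads from every node. A rough sketch of the shape, with made-up values:

const thread = [
  // ...comment nodes from getComments()...
  {
    // synthetic node built from the PR itself
    publishedAt: '2018-03-01T00:00:00Z',          // pr.createdAt
    bodyText: 'CI: https://ci.nodejs.org/job/...' // pr.bodyText, scanned for CI links
  }
  // ...review nodes from getReviews()...
];
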
lib/request.js

Lines changed: 7 additions & 0 deletions
@@ -58,6 +58,13 @@ class Request {
       };
       throw err;
     }
+    if (result.message) {
+      const err = new Error(`GraphQL request Error: ${result.message}`);
+      err.data = {
+        variables
+      };
+      throw err;
+    }
     return result.data;
   }
 

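The new guard handles responses where the GraphQL endpoint returns a top-level error object instead of a data payload. A sketch of the kind of body that now surfaces as an exception ('Bad credentials' is illustrative, the typical message for a rejected token):

// Previously a body like this was not caught and the caller simply received
// undefined from result.data; now the request throws instead.
const result = {
  message: 'Bad credentials' // e.g. a rejected or missing token
};
// -> Error: GraphQL request Error: Bad credentials
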