ci: split CI aggregator and generate markdown
logictitans committed Aug 15, 2018
1 parent de6c2ae commit a3ed0ec
Showing 3 changed files with 170 additions and 73 deletions.
109 changes: 37 additions & 72 deletions bin/ncu-ci
@@ -2,10 +2,6 @@

'use strict';

const {
parsePRFromURL
} = require('../lib/links');

const {
JobParser,
parseJobFromURL,
@@ -15,7 +11,7 @@ const {
} = require('../lib/ci/ci_type_parser');

const {
PRBuild, BenchmarkRun, CommitBuild, listBuilds
PRBuild, BenchmarkRun, CommitBuild, listBuilds, FailureAggregator
// , jobCache
} = require('../lib/ci/ci_result_parser');
const clipboardy = require('clipboardy');
@@ -26,8 +22,6 @@ const auth = require('../lib/auth');
const Request = require('../lib/request');
const CLI = require('../lib/cli');
const yargs = require('yargs');
const _ = require('lodash');
const chalk = require('chalk');

// This is used for testing
// Default cache dir is ${ncu-source-dir}/.ncu/cache
@@ -49,12 +43,20 @@ const argv = yargs
})
.command({
command: 'walk <type>',
desc: 'Walk the CI and store the failures',
desc: 'Walk the CI and display the failures',
builder: (yargs) => {
yargs
.positional('type', {
describe: 'type of CI',
choices: ['commit', 'pr']
})
.option('stats', {
default: false,
describe: 'Aggregate the results'
})
.option('limit', {
default: 99,
describe: 'Maximum number of CIs to get data from'
});
},
handler
@@ -160,19 +162,10 @@ async function runQueue(queue, cli, request, argv) {
dataToJson = dataToJson.concat(build.formatAsJson());
}

if (argv.copy) {
clipboardy.writeSync(dataToCopy);
cli.separator('');
cli.log(`Written markdown to clipboard`);
}

if (argv.json) {
writeJson(argv.json, dataToJson);
cli.separator('');
cli.log(`Written JSON to ${argv.json}`);
}

return dataToJson;
return {
json: dataToJson,
copy: dataToCopy
};
}

function pad(any, length) {
@@ -199,55 +192,6 @@ function displayHealth(builds, cli) {
cli.log(result);
}

function getHighlight(f) {
return f.reason.split('\n')[f.highlight]
.replace(/not ok \d+ /, '')
.replace(
/'JNLP4-connect connection from .+?'/, 'JNLP4-connect connection from ...'
)
.replace(/FATAL: Could not checkout \w+/, 'FATAL: Could not checkout ...');
}

function aggregateFailures(cli, failures) {
const grouped = _.chain(failures)
.groupBy(getHighlight)
.toPairs()
.sortBy()
.value();
let results = [];
for (const item of grouped) {
const [ key, failures ] = item;
const cleaned = _.chain(failures)
.uniqBy('source')
.sortBy((f) => parseJobFromURL(f.upstream).jobid)
.value();
results.push([ key, failures, cleaned ]);
};

results = _.sortBy(results, r => 0 - (r[2].length));

cli.separator(chalk.bold('Stats'));
for (const item of results) {
const [ key, failures, cleaned ] = item;
const machines = _.uniq(failures.map(f => f.builtOn)).join(', ');
cli.table('Reason', key);
cli.table('Type', failures[0].type);
const prs = cleaned
.map(f => {
const parsed = parsePRFromURL(f.source);
return parsed ? `#${parsed.prid}` : f.source;
})
.join(', ');
cli.table('Failed PR', `${cleaned.length} (${prs})`);
cli.table('Appeared', machines);
if (cleaned.length > 1) {
cli.table('First CI', `${cleaned[0].upstream}`);
}
cli.table('Last CI', `${cleaned[cleaned.length - 1].upstream}`);
cli.separator();
}
}

async function main(command, argv) {
const cli = new CLI();
const credentials = await auth({
@@ -267,7 +211,7 @@ async function main(command, argv) {
const type = commandToType[argv.type];
const builds = await listBuilds(cli, request, type);
if (command === 'walk') {
for (const build of builds.failed) {
for (const build of builds.failed.slice(0, argv.limit)) {
queue.push(build);
}
} else {
@@ -304,8 +248,29 @@ async function main(command, argv) {

if (queue.length > 0) {
const data = await runQueue(queue, cli, request, argv);

if (command === 'walk' && argv.stats) {
aggregateFailures(cli, data);
const aggregator = new FailureAggregator(cli, data.json);
data.json = aggregator.aggregate();
cli.log('');
cli.separator('Stats');
cli.log('');
aggregator.display();
if (argv.copy) {
data.copy = aggregator.formatAsMarkdown();
}
}

if (argv.copy) {
clipboardy.writeSync(data.copy);
cli.separator('');
cli.log(`Written markdown to clipboard`);
}

if (argv.json) {
writeJson(argv.json, data.json);
cli.separator('');
cli.log(`Written JSON to ${argv.json}`);
}
}
}
7 changes: 7 additions & 0 deletions lib/ci/ci_failure_parser.js
@@ -250,4 +250,11 @@ CIFailureParser.FAILURE_CONSTRUCTORS = {
GIT_FAILURE: GitFailure
};
CIFailureParser.CIResult = CIResult;
CIFailureParser.FAILURE_TYPES_NAME = {
BUILD_FAILURE: 'Build Failure',
JENKINS_FAILURE: 'Jenkins Failure',
JS_TEST_FAILURE: 'JSTest Failure',
CC_TEST_FAILURE: 'CCTest Failure',
GIT_FAILURE: 'Git Failure'
};
module.exports = CIFailureParser;
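
The new FAILURE_TYPES_NAME map attaches a human-readable label to each failure type; the aggregator added below uses it for its markdown section headings. A minimal lookup sketch, assuming the require path resolves from the repository root and that a failure's `type` matches one of the keys above:

// Illustration only, not part of this patch.
const CIFailureParser = require('./lib/ci/ci_failure_parser');
const { FAILURE_TYPES_NAME } = CIFailureParser;

// A JS test failure ends up under the "### JSTest Failure" heading
// produced by FailureAggregator.formatAsMarkdown() below.
console.log(FAILURE_TYPES_NAME.JS_TEST_FAILURE); // 'JSTest Failure'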
127 changes: 126 additions & 1 deletion lib/ci/ci_result_parser.js
@@ -1,5 +1,8 @@
'use strict';

const {
parsePRFromURL
} = require('../links');
const Cache = require('../cache');
const CIFailureParser = require('./ci_failure_parser');
const {
@@ -9,14 +12,16 @@ const {
FAILURE_CONSTRUCTORS: {
[BUILD_FAILURE]: BuildFailure
},
CIResult
CIResult,
FAILURE_TYPES_NAME
} = CIFailureParser;
const {
CI_DOMAIN,
parseJobFromURL,
CI_TYPES
} = require('./ci_type_parser');
const qs = require('querystring');
const _ = require('lodash');
const chalk = require('chalk');

const SUCCESS = 'SUCCESS';
@@ -325,6 +330,125 @@ class TestBuild extends Job {
}
}

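// Reduce a failure to the highlighted line of its `reason`, stripping
// volatile details (TAP "not ok" numbers, JNLP connection names, checkout
// hashes) so that equivalent failures group together.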
function getHighlight(f) {
return f.reason.split('\n')[f.highlight]
.replace(/not ok \d+ /, '')
.replace(
/'JNLP4-connect connection from .+?'/, 'JNLP4-connect connection from ...'
)
.replace(/FATAL: Could not checkout \w+/, 'FATAL: Could not checkout ...');
}

function markdownRow(...args) {
let result = '';
for (const item of args) {
result += `| ${item} `;
}
return result + '|\n';
}
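// Illustration (not part of this patch): markdownRow() joins its arguments
// into one pipe-delimited markdown table row, e.g.
//   markdownRow('Type', 'JS_TEST_FAILURE') -> '| Type | JS_TEST_FAILURE |\n'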

class FailureAggregator {
constructor(cli, failures) {
this.cli = cli;
this.failures = failures;
this.aggregates = null;
}

aggregate() {
const failures = this.failures;
const groupedByReason = _.chain(failures)
.groupBy(getHighlight)
.toPairs()
.sortBy(0)
.value();
const data = [];
for (const item of groupedByReason) {
const [ reason, failures ] = item;
// If multiple sub-builds of one PR fail for the same reason, only take
// one of those builds, as that may be a genuine failure of the PR itself
const prs = _.chain(failures)
.uniqBy('source')
.sortBy((f) => parseJobFromURL(f.upstream).jobid)
.map((item) => ({ source: item.source, upstream: item.upstream }))
.value();
const machines = _.uniq(failures.map(f => f.builtOn));
data.push({
reason, type: failures[0].type, failures, prs, machines
});
};

const groupedByType = _.groupBy(data, 'type');
for (const type of Object.keys(groupedByType)) {
groupedByType[type] =
_.sortBy(groupedByType[type], r => 0 - (r.prs.length));
}
this.aggregates = groupedByType;
return groupedByType;
}

formatAsMarkdown() {
let { aggregates } = this;
if (!aggregates) {
aggregates = this.aggregates = this.aggregate();
}

let output = '';
for (const type of Object.keys(aggregates)) {
output += `\n### ${FAILURE_TYPES_NAME[type]}\n\n`;
for (const item of aggregates[type]) {
const { reason, type, prs, failures, machines } = item;
if (prs.length < 2) { continue; }
output += markdownRow('Reason', `\`${reason}\``);
output += markdownRow('-', ':-');
output += markdownRow('Type', type);
const source = prs.map(f => f.source);
output += markdownRow(
'Failed PR', `${source.length} (${source.join(', ')})`
);
output += markdownRow('Appeared', machines.join(', '));
if (prs.length > 1) {
output += markdownRow('First CI', `${prs[0].upstream}`);
}
output += markdownRow('Last CI', `${prs[prs.length - 1].upstream}`);
output += '\n' + fold('Example', failures[0].reason) + '\n';
output += '\n-------\n\n';
}
}
return output;
}

display() {
let { cli, aggregates } = this;
if (!aggregates) {
aggregates = this.aggregates = this.aggregate();
}

for (const type of Object.keys(aggregates)) {
cli.separator(type);
for (const item of aggregates[type]) {
const { reason, type, prs, failures, machines } = item;
cli.table('Reason', reason);
cli.table('Type', type);
const source = prs
.map(f => {
const parsed = parsePRFromURL(f.source);
return parsed ? `#${parsed.prid}` : f.source;
});
cli.table('Failed PR', `${source.length} (${source.join(', ')})`);
cli.table('Appeared', machines.join(', '));
if (prs.length > 1) {
cli.table('First CI', `${prs[0].upstream}`);
}
cli.table('Last CI', `${prs[prs.length - 1].upstream}`);
cli.log('\n' + chalk.bold('Example:') + '\n');
const example = failures[0].reason;
cli.log(example.length > 512 ? example.slice(0, 512) + '...' : example);
cli.separator();
}
}
}
}

class CommitBuild extends TestBuild {
constructor(cli, request, id) {
const path = `job/node-test-commit/${id}/`;
@@ -714,6 +838,7 @@ class BenchmarkRun extends Job {
}

module.exports = {
FailureAggregator,
PRBuild,
BenchmarkRun,
CommitBuild,
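
A minimal usage sketch of the new class, mirroring what bin/ncu-ci now does for `ncu-ci walk pr --stats --copy`; the require paths assume it is run from a node-core-utils checkout, and `failures` stands in for the JSON that runQueue() collects:

// Sketch only.
const CLI = require('./lib/cli');
const { FailureAggregator } = require('./lib/ci/ci_result_parser');

const cli = new CLI();
const failures = [];                  // normally data.json returned by runQueue()
const aggregator = new FailureAggregator(cli, failures);

aggregator.aggregate();               // group failures by reason, then by failure type
aggregator.display();                 // print the per-reason tables to the terminal
const markdown = aggregator.formatAsMarkdown(); // what --copy puts on the clipboard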
