filterPasses -> validatePasses (#608)
* filterPasses -> validatePasses. Don't mutate the config

* cleanup config constructor.

* remove audit whitelist.
paulirish authored and brendankenny committed Aug 24, 2016
1 parent 5c72d72 commit 1fb77ae
Showing 6 changed files with 56 additions and 157 deletions.
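The net effect on the public API is that `Config` now takes only `(configJSON, configPath)` and deep-copies the incoming object rather than mutating it. A minimal sketch of the new calling convention, assuming a Node environment and a module path matching this repo's layout (the require path and sample config values are illustrative, not part of the diff):

```js
// Sketch only — the require path and sample values are assumptions, not part of this diff.
const Config = require('./lighthouse-core/config/config');

const configJSON = {
  passes: [{gatherers: ['url', 'viewport']}],
  audits: ['viewport']
};

// New two-argument signature: the auditWhitelist parameter is gone.
const config = new Config(configJSON, __filename);

// The constructor clones the input via JSON.parse(JSON.stringify(...)),
// so the caller's object is left untouched.
console.assert(config.passes !== configJSON.passes);
console.assert(configJSON.passes[0].gatherers.length === 2);
```

Cloning up front is what lets helpers such as `expandArtifacts` write into `configJSON` freely without leaking those changes back to the caller.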
10 changes: 0 additions & 10 deletions lighthouse-cli/index.js
@@ -50,7 +50,6 @@ const cli = yargs
'load-page',
'save-assets',
'save-artifacts',
'audit-whitelist',
'list-all-audits',
'list-trace-categories',
'config-path'
@@ -60,7 +59,6 @@
'load-page': 'Loads the page',
'save-assets': 'Save the trace contents & screenshots to disk',
'save-artifacts': 'Save all gathered artifacts to disk',
'audit-whitelist': 'Comma separated list of audits to run',
'list-all-audits': 'Prints a list of all available audits and exits',
'list-trace-categories': 'Prints a list of all required trace categories and exits',
'config-path': 'The absolute path to the config JSON.'
@@ -92,7 +90,6 @@ Example: --output-path=./lighthouse-results.html
// default values
.default('mobile', true)
.default('load-page', true)
.default('audit-whitelist', 'all')
.default('output', Printer.OUTPUT_MODE.pretty)
.default('output-path', 'stdout')
.check(argv => {
@@ -138,13 +135,6 @@ if (cli.verbose) {
flags.logLevel = 'error';
}

// Normalize audit whitelist.
if (!flags.auditWhitelist || flags.auditWhitelist === 'all') {
flags.auditWhitelist = null;
} else {
flags.auditWhitelist = new Set(flags.auditWhitelist.split(',').map(a => a.toLowerCase()));
}

// kick off a lighthouse run
lighthouse(url, flags, config)
.then(results => Printer.write(results, outputMode, outputPath))
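With `--audit-whitelist` removed from the CLI, selecting a subset of audits is done by handing the runner a config whose `audits` array names just the audits you want — via `--config-path` on the command line, or directly when calling the node module the way the CLI does above. A rough sketch of the programmatic route (the require path, URL, and flag values are illustrative assumptions):

```js
// Sketch only — module path and values are assumptions, not part of this diff.
const lighthouse = require('./lighthouse-core');

const myConfig = {
  passes: [{gatherers: ['viewport']}],
  audits: ['viewport'],   // replaces the old comma-separated whitelist
  aggregations: []        // a real config would define aggregations, as the default config does
};

// Same call shape as the CLI's `lighthouse(url, flags, config)` above.
lighthouse('https://example.com', {logLevel: 'error'}, myConfig)
  .then(results => console.log(results));
```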
93 changes: 30 additions & 63 deletions lighthouse-core/config/config.js
@@ -106,29 +106,22 @@ function cleanTrace(trace) {
return trace;
}

function filterPasses(passes, audits, rootPath) {
function validatePasses(passes, audits, rootPath) {
if (!Array.isArray(passes)) {
return;
}
const requiredGatherers = getGatherersNeededByAudits(audits);

// Make sure we only have the gatherers that are needed by the audits
// that have been listed in the config.
const filteredPasses = passes.map(pass => {
const freshPass = Object.assign({}, pass);

freshPass.gatherers = freshPass.gatherers.filter(gatherer => {
// Log if we are running gathers that are not needed by the audits listed in the config
passes.forEach(pass => {
pass.gatherers.forEach(gatherer => {
const GathererClass = GatherRunner.getGathererClass(gatherer, rootPath);
const isGatherRequiredByAudits = requiredGatherers.has(GathererClass.name);
if (isGatherRequiredByAudits === false) {
log.warn('config', `Skipping ${GathererClass.name} gatherer as no audit requires it.`);
log.warn('config', `${GathererClass.name} is included, however no audit requires it.`);
}
return isGatherRequiredByAudits;
});

return freshPass;
})

// Now remove any passes which no longer have gatherers.
.filter(p => p.gatherers.length > 0);
return filteredPasses;
});
}

function getGatherersNeededByAudits(audits) {
@@ -144,33 +137,10 @@ function getGatherersNeededByAudits(audits) {
}, new Set());
}

function filterAudits(audits, auditWhitelist) {
// If there is no whitelist, assume all.
if (!auditWhitelist) {
return Array.from(audits);
}

const rejected = [];
const filteredAudits = audits.filter(a => {
const auditName = a.toLowerCase();
const inWhitelist = auditWhitelist.has(auditName);

if (!inWhitelist) {
rejected.push(auditName);
}

return inWhitelist;
});

if (rejected.length) {
log.log('info', 'Running these audits:', `${filteredAudits.join(', ')}`);
log.log('info', 'Skipping these audits:', `${rejected.join(', ')}`);
function requireAudits(audits, rootPath) {
if (!audits) {
return null;
}

return filteredAudits;
}

function expandAudits(audits, rootPath) {
const Runner = require('../runner');

return audits.map(audit => {
@@ -247,8 +217,9 @@ function assertValidAudit(audit, auditDefinition) {
}

function expandArtifacts(artifacts) {
const expandedArtifacts = Object.assign({}, artifacts);

if (!artifacts) {
return null;
}
// currently only trace logs and performance logs should be imported
if (artifacts.traces) {
Object.keys(artifacts.traces).forEach(key => {
@@ -264,15 +235,14 @@ function expandArtifacts(artifacts) {
}
trace = cleanTrace(trace);

expandedArtifacts.traces[key] = trace;
artifacts.traces[key] = trace;
});
}

if (artifacts.performanceLog) {
expandedArtifacts.networkRecords = recordsFromLogs(require(artifacts.performanceLog));
artifacts.networkRecords = recordsFromLogs(require(artifacts.performanceLog));
}

return expandedArtifacts;
return artifacts;
}

/**
@@ -283,27 +253,24 @@ class Config {
* @constructor
* @param{Object} config
*/
constructor(configJSON, auditWhitelist, configPath) {
constructor(configJSON, configPath) {
if (!configJSON) {
configJSON = defaultConfig;
}

// We don't want to mutate the original config object
configJSON = JSON.parse(JSON.stringify(configJSON));
// Store the directory of the config path, if one was provided.
this._configDir = configPath ? path.dirname(configPath) : undefined;

this._audits = configJSON.audits ? expandAudits(
filterAudits(configJSON.audits, auditWhitelist), this._configDir
) : null;
// filterPasses expects audits to have been expanded
this._passes = configJSON.passes ?
filterPasses(configJSON.passes, this._audits, this._configDir) :
null;
this._auditResults = configJSON.auditResults ? Array.from(configJSON.auditResults) : null;
this._artifacts = null;
if (configJSON.artifacts) {
this._artifacts = expandArtifacts(configJSON.artifacts);
}
this._aggregations = configJSON.aggregations ? Array.from(configJSON.aggregations) : null;
this._passes = configJSON.passes || null;
this._auditResults = configJSON.auditResults || null;
this._aggregations = configJSON.aggregations || null;

this._audits = requireAudits(configJSON.audits, this._configDir);
this._artifacts = expandArtifacts(configJSON.artifacts);

// validatePasses must follow after audits are required
validatePasses(configJSON.passes, this._audits, this._configDir);
}

/** @type {string} */
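Behaviourally, the rename means a gatherer that no listed audit needs is no longer stripped out of the pass — it stays in the config and `validatePasses` only logs a warning. A quick sketch (require path assumed; the gatherer and audit names mirror the test removed from config-test.js below):

```js
// Sketch only — the require path is an assumption, not part of this diff.
const Config = require('./lighthouse-core/config/config');

const config = new Config({
  passes: [{
    gatherers: ['url', 'html', 'viewport']  // only 'viewport' is required by the audit below
  }],
  audits: ['viewport']
});

// filterPasses used to trim this down to the one required gatherer;
// validatePasses keeps all three and logs an
// "<Gatherer> is included, however no audit requires it." warning for the extras.
console.log(config.passes[0].gatherers.length);  // 3
```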
7 changes: 1 addition & 6 deletions lighthouse-core/config/perf.example-custom.json
@@ -3,12 +3,7 @@
"network": true,
"trace": true,
"loadPage": true,
"gatherers": [
"url",
"critical-request-chains",
"screenshots",
"speedline"
]
"gatherers": []
}],

"audits": [
41 changes: 8 additions & 33 deletions lighthouse-core/test/config/config-test.js
@@ -51,22 +51,6 @@ describe('Config', () => {
/Unable to locate/);
});

it('filters gatherers from passes when no audits require them', () => {
const config = new Config({
passes: [{
gatherers: [
'url',
'html',
'viewport'
]
}],

audits: ['viewport']
});

assert.equal(config.passes[0].gatherers.length, 1);
});

it('doesn\'t mutate old gatherers when filtering passes', () => {
const configJSON = {
passes: [{
@@ -99,22 +83,13 @@
}];

const config = new Config(configJSON);
assert.notEqual(config, configJSON);
assert.ok(config.aggregations);
assert.ok(config.auditResults);
assert.deepStrictEqual(config.aggregations, configJSON.aggregations);
assert.notEqual(config.aggregations, configJSON.aggregations);
assert.notEqual(config.auditResults, configJSON.auditResults);
assert.deepStrictEqual(config.auditResults, configJSON.auditResults);
});

it('returns filtered audits when a whitelist is given', () => {
const config = new Config({
audits: ['is-on-https']
}, new Set(['b']));

assert.ok(Array.isArray(config.audits));
return assert.equal(config.audits.length, 0);
assert.notEqual(config, configJSON, 'Objects are strictly different');
assert.ok(config.aggregations, 'Aggregations array exists');
assert.ok(config.auditResults, 'Audits array exists');
assert.deepStrictEqual(config.aggregations, configJSON.aggregations, 'Aggregations match');
assert.notEqual(config.aggregations, configJSON.aggregations, 'Aggregations not same object');
assert.notEqual(config.auditResults, configJSON.auditResults, 'Audits not same object');
assert.deepStrictEqual(config.auditResults, configJSON.auditResults, 'Audits match');
});

it('expands audits', () => {
@@ -136,7 +111,7 @@ describe('Config', () => {
it('loads an audit relative to a config', () => {
return assert.doesNotThrow(_ => new Config({
audits: ['../fixtures/valid-custom-audit']
}, null, __filename));
}, __filename));
});

it('throws when it finds invalid audits', () => {
(Diffs for the remaining 2 of the 6 changed files were not loaded.)
