Skip to content

Commit

Permalink
core(lantern): more flexible graph edge creation (#4933)
Browse files Browse the repository at this point in the history
Also disable CPU throttling for Appveyor
  • Loading branch information
patrickhulce authored and paulirish committed Apr 10, 2018
1 parent a031e46 commit a6a9fc1
Show file tree
Hide file tree
Showing 6 changed files with 94 additions and 15 deletions.
2 changes: 1 addition & 1 deletion lighthouse-cli/test/fixtures/byte-efficiency/script.js

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions lighthouse-cli/test/fixtures/byte-efficiency/tester.html
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@
document.head.appendChild(style);
}

/**
 * Busy-waits on the main thread to simulate a long task.
 * (The scrape interleaved the old and new diff lines here, leaving two
 * conflicting declarations; this is the post-diff, parameterized version.)
 * @param {number=} length How long to block, in milliseconds. Defaults to 75.
 */
function longTask(length = 75) {
  const start = Date.now();
  // Deliberately spin — the point is to occupy the main thread for `length` ms.
  while (Date.now() < start + length) ;
}

// Add a long task to delay FI
Expand All @@ -53,6 +53,7 @@
left: 0;
}
</style>
<script src="script.js"></script>
</head>

<body>
Expand Down Expand Up @@ -165,7 +166,6 @@ <h2>Byte efficiency tester page</h2>

<!-- Ensure the page takes at least 7 seconds and we don't exit before the lazily loaded image -->
<script src="delay-complete.js?delay=7000" async></script>
<script src="script.js"></script>
<script>
// Used block #1
// FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES -- FILLER DATA JUST TO OCCUPY BYTES
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,15 +53,14 @@ module.exports = [
extendedInfo: {
value: {
wastedKb: '>=25',
wastedMs: '>500',
wastedMs: '>300',
results: {
length: 2,
},
},
},
},
'offscreen-images': {
score: '<1', // big enough savings to interfere with download of script.js
extendedInfo: {
value: {
results: [
Expand All @@ -79,18 +78,17 @@ module.exports = [
},
},
'uses-webp-images': {
score: '<1', // big enough savings to interfere with download of script.js
extendedInfo: {
value: {
wastedKb: '>60',
wastedMs: '>200',
results: {
length: 4,
},
},
},
},
'uses-text-compression': {
score: '<1',
extendedInfo: {
value: {
wastedMs: '>700',
Expand All @@ -115,7 +113,6 @@ module.exports = [
extendedInfo: {
value: {
wastedKb: '>50',
wastedMs: '>100',
results: {
length: 3,
},
Expand Down
35 changes: 32 additions & 3 deletions lighthouse-cli/test/smokehouse/smokehouse.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

/* eslint-disable no-console */

const fs = require('fs');
const path = require('path');
const spawnSync = require('child_process').spawnSync;
const yargs = require('yargs');
Expand Down Expand Up @@ -43,19 +44,34 @@ function resolveLocalOrCwd(payloadPath) {
* Launch Chrome and do a full Lighthouse run.
* @param {string} url
* @param {string} configPath
* @param {boolean=} isDebug
* @return {!LighthouseResults}
*/
function runLighthouse(url, configPath) {
function runLighthouse(url, configPath, isDebug) {
isDebug = isDebug || process.env.SMOKEHOUSE_DEBUG;

const command = 'node';
const outputPath = `smokehouse-${Math.round(Math.random() * 100000)}.report.json`;
const args = [
'lighthouse-cli/index.js',
url,
`--config-path=${configPath}`,
`--output-path=${outputPath}`,
'--output=json',
'--quiet',
'--port=0',
];

if (isDebug) {
args.push('-GA');
}

if (process.env.APPVEYOR) {
// Appveyor is already very slow; disable CPU throttling so we don't compound it with a 16x slowdown
// see https://github.com/GoogleChrome/lighthouse/issues/4891
args.push('--disable-cpu-throttling');
}

// Lighthouse sometimes times out waiting for a connection to Chrome in CI.
// Watch for this error and retry relaunching Chrome and running Lighthouse up
// to RETRIES times. See https://github.com/GoogleChrome/lighthouse/issues/833
Expand All @@ -80,7 +96,19 @@ function runLighthouse(url, configPath) {
process.exit(runResults.status);
}

return JSON.parse(runResults.stdout);
if (isDebug) {
console.log(`STDOUT: ${runResults.stdout}`);
console.error(`STDERR: ${runResults.stderr}`);
}

const lhr = fs.readFileSync(outputPath, 'utf8');
if (isDebug) {
console.log('LHR output available at: ', outputPath);
} else {
fs.unlinkSync(outputPath);
}

return JSON.parse(lhr);
}

/**
Expand Down Expand Up @@ -275,6 +303,7 @@ const cli = yargs
.describe({
'config-path': 'The path to the config JSON file',
'expectations-path': 'The path to the expected audit results file',
'debug': 'Save the artifacts along with the output',
})
.default('config-path', DEFAULT_CONFIG_PATH)
.default('expectations-path', DEFAULT_EXPECTATIONS_PATH)
Expand All @@ -289,7 +318,7 @@ let passingCount = 0;
let failingCount = 0;
expectations.forEach(expected => {
console.log(`Checking '${expected.initialUrl}'...`);
const results = runLighthouse(expected.initialUrl, configPath);
const results = runLighthouse(expected.initialUrl, configPath, cli.debug);
const collated = collateResults(results, expected);
const counts = report(collated);
passingCount += counts.passed;
Expand Down
15 changes: 13 additions & 2 deletions lighthouse-core/gather/computed/page-dependency-graph.js
Original file line number Diff line number Diff line change
Expand Up @@ -145,20 +145,31 @@ class PageDependencyGraphArtifact extends ComputedArtifact {
/**
 * Adds a dependent edge from `cpuNode` to the network request with id `reqId`,
 * i.e. records that the CPU task initiated that request.
 * The edge is skipped when the request is unknown, is not an XHR, or started
 * at/before the CPU task's start (a request that began earlier cannot have
 * been initiated by this task).
 * NOTE(review): `networkNodeOutput` and `WebInspector` come from the enclosing
 * scope (not visible in this chunk) — confirm against the full file.
 * (The scrape interleaved old and new diff lines here; this is the post-diff
 * version of the condition.)
 */
function addDependentNetworkRequest(cpuNode, reqId) {
  const networkNode = networkNodeOutput.idToNodeMap.get(reqId);
  if (!networkNode ||
      // Ignore all non-XHRs
      networkNode.record._resourceType !== WebInspector.resourceTypes.XHR ||
      // Ignore all network nodes that started before this CPU task started
      // A network request that started earlier could not possibly have been started by this task
      networkNode.startTime <= cpuNode.startTime) return;
  cpuNode.addDependent(networkNode);
}

function addDependencyOnUrl(cpuNode, url) {
if (!url) return;
// Allow network requests that end up to 100ms after the task started
// Some script evaluations can start before the script finishes downloading
const minimumAllowableTimeSinceNetworkNodeEnd = -100 * 1000;
const candidates = networkNodeOutput.urlToNodeMap.get(url) || [];

let minCandidate = null;
let minDistance = Infinity;
// Find the closest request that finished before this CPU task started
candidates.forEach(candidate => {
// Explicitly ignore all requests that started after this CPU node
// A network request that started after this task started cannot possibly be a dependency
if (cpuNode.startTime <= candidate.startTime) return;

const distance = cpuNode.startTime - candidate.endTime;
if (distance > 0 && distance < minDistance) {
if (distance >= minimumAllowableTimeSinceNetworkNodeEnd && distance < minDistance) {
minCandidate = candidate;
minDistance = distance;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ const assert = require('assert');

/**
 * Builds a minimal network-record stub for the dependency-graph tests.
 * (The scrape interleaved the old and new diff lines here, leaving two
 * conflicting `endTime` assignments; this is the post-diff, 50ms version.)
 * @param {number} requestId
 * @param {string} url
 * @param {number=} startTime Start time in milliseconds; stored in seconds.
 * @param {?Object=} _initiator
 * @param {?Object=} _resourceType
 * @return {{requestId: number, url: string, startTime: number, endTime: number, _initiator: ?Object, _resourceType: ?Object}}
 */
function createRequest(requestId, url, startTime = 0, _initiator = null, _resourceType = null) {
  // Callers pass milliseconds; the graph consumes seconds.
  const start = startTime / 1000;
  // Every stub request has a fixed 50ms duration.
  const endTime = start + 0.05;
  return {requestId, url, startTime: start, endTime, _initiator, _resourceType};
}

Expand Down Expand Up @@ -207,5 +207,47 @@ describe('PageDependencyGraph computed artifact:', () => {
assert.deepEqual(nodes[2].getDependencies(), [nodes[0]]);
assert.deepEqual(nodes[3].getDependencies(), [nodes[0]]); // should depend on rootNode instead
});

it('should be forgiving without cyclic dependencies', () => {
  // Five requests: 2 and 5 are XHRs, the rest are plain resources.
  const records = [
    createRequest(1, '1', 0),
    createRequest(2, '2', 250, null, WebInspector.resourceTypes.XHR),
    createRequest(3, '3', 210),
    createRequest(4, '4', 590),
    createRequest(5, '5', 595, null, WebInspector.resourceTypes.XHR),
  ];

  addTaskEvents(200, 200, [
    // CPU 1.2 should depend on Network 1
    {name: 'EvaluateScript', data: {url: '1'}},

    // Network 2 (an XHR starting inside the task) should depend on CPU 1.2,
    // not the other way around
    {name: 'ResourceSendRequest', data: {requestId: 2}},
    {name: 'XHRReadyStateChange', data: {readyState: 4, url: '2'}},

    // CPU 1.2 should not depend on Network 3 because it starts after CPU 1.2
    {name: 'EvaluateScript', data: {url: '3'}},
  ]);

  addTaskEvents(600, 150, [
    // CPU 1.4 should depend on Network 4 even though 4 finishes after the
    // task starts (within the allowed tolerance)
    {name: 'InvalidateLayout', data: {stackTrace: [{url: '4'}]}},
    // Network 5 should not depend on CPU 1.4 because it started before CPU 1.4
    {name: 'ResourceSendRequest', data: {requestId: 5}},
  ]);

  const graph = PageDependencyGraph.createGraph(traceOfTab, records);
  const visited = [];
  graph.traverse(node => visited.push(node));

  // Helper: the ids of a node's direct dependencies.
  const idsOf = node => node.getDependencies().map(dep => dep.id);

  assert.deepEqual(idsOf(visited[0]), []);
  assert.deepEqual(idsOf(visited[1]), [1, '1.200000']);
  assert.deepEqual(idsOf(visited[2]), [1]);
  assert.deepEqual(idsOf(visited[3]), [1]);
  assert.deepEqual(idsOf(visited[4]), [1]);
  assert.deepEqual(idsOf(visited[5]), [1]);
  assert.deepEqual(idsOf(visited[6]), [4]);
});
});
});

0 comments on commit a6a9fc1

Please sign in to comment.