core(preload): use lantern to compute savings #5062
@@ -50,55 +50,118 @@ class UsesRelPreloadAudit extends Audit {
    return requests;
  }

+  /**
+   * Computes the estimated effect of preloading all the resources.
+   * @param {Set<string>} urls The set of resource URLs to consider preloading
+   * @param {Node} graph
+   * @param {Simulator} simulator
+   * @param {LH.WebInspector.NetworkRequest} mainResource
+   * @return {{wastedMs: number, results: Array<{url: string, wastedMs: number}>}}
+   */
+  static computeWasteWithGraph(urls, graph, simulator, mainResource) {
+    if (!urls.size) {
+      return {wastedMs: 0, results: []};
+    }
+
+    const simulationBeforeChanges = simulator.simulate(graph, {ignoreObserved: true});
+
+    const modifiedGraph = graph.cloneWithRelationships();
+
+    const nodesToPreload = [];
+    /** @type {Node|null} */
+    let mainDocumentNode = null;
+    modifiedGraph.traverse(node => {
+      if (node.record && urls.has(node.record.url)) {
+        nodesToPreload.push(node);
+      }
+
+      if (node.record && node.record.url === mainResource.url) {
+        mainDocumentNode = node;
+      }
+    });
+
+    if (!mainDocumentNode) {
+      // Should always find the main document node
+      throw new Error('Could not find main document node');
+    }
+
+    // Preload has the effect of moving the resource's only dependency to the main HTML document
+    // Remove all dependencies of the nodes
+    for (const node of nodesToPreload) {
+      node.removeAllDependencies();
+      node.addDependency(mainDocumentNode);
+    }
+
+    const simulationAfterChanges = simulator.simulate(modifiedGraph, {ignoreObserved: true});
+    const originalNodesByRecord = Array.from(simulationBeforeChanges.nodeTimings.keys())
+      .reduce((map, node) => map.set(node.record, node), new Map());
+
+    const results = [];
+    for (const node of nodesToPreload) {
+      const originalNode = originalNodesByRecord.get(node.record);
+      const timingAfter = simulationAfterChanges.nodeTimings.get(node);
+      const timingBefore = simulationBeforeChanges.nodeTimings.get(originalNode);
+      const wastedMs = Math.round(timingBefore.endTime - timingAfter.endTime);
+      if (wastedMs < THRESHOLD_IN_MS) continue;
+      results.push({url: node.record.url, wastedMs});
+    }
+
+    if (!results.length) {
+      return {wastedMs: 0, results};
+    }
+
+    return {
+      // Preload won't necessarily impact the deepest chain/overall time
+      // We'll use the maximum endTime improvement for now
+      wastedMs: Math.max(...results.map(item => item.wastedMs)),
+      results,
+    };
+  }
  /**
   * @param {!Artifacts} artifacts
   * @return {!AuditResult}
   */
-  static audit(artifacts) {
-    const devtoolsLogs = artifacts.devtoolsLogs[UsesRelPreloadAudit.DEFAULT_PASS];
+  static audit(artifacts, context) {
+    const trace = artifacts.traces[UsesRelPreloadAudit.DEFAULT_PASS];
+    const devtoolsLog = artifacts.devtoolsLogs[UsesRelPreloadAudit.DEFAULT_PASS];
+    const simulatorOptions = {trace, devtoolsLog, settings: context.settings};

    return Promise.all([
-      artifacts.requestCriticalRequestChains(devtoolsLogs),
-      artifacts.requestMainResource(devtoolsLogs),
-    ]).then(([critChains, mainResource]) => {
-      const results = [];
-      let maxWasted = 0;
+      // TODO(phulce): eliminate dependency on CRC
+      artifacts.requestCriticalRequestChains(devtoolsLog),
+      artifacts.requestMainResource(devtoolsLog),
+      artifacts.requestPageDependencyGraph({trace, devtoolsLog}),
+      artifacts.requestLoadSimulator(simulatorOptions),
+    ]).then(([critChains, mainResource, graph, simulator]) => {
      // get all critical requests 2 + mainResourceIndex levels deep
      const mainResourceIndex = mainResource.redirects ? mainResource.redirects.length : 0;

      const criticalRequests = UsesRelPreloadAudit._flattenRequests(critChains,
        3 + mainResourceIndex, 2 + mainResourceIndex);
-      criticalRequests.forEach(request => {
-        const networkRecord = request;
+      const urls = new Set();
Review comment: I think you'll need an explicit
Reply: done
+      for (const networkRecord of criticalRequests) {
        if (!networkRecord._isLinkPreload && networkRecord.protocol !== 'data') {
-          // calculate time between mainresource.endTime and resource start time
-          const wastedMs = Math.min(request._startTime - mainResource._endTime,
-            request._endTime - request._startTime) * 1000;
-
-          if (wastedMs >= THRESHOLD_IN_MS) {
-            maxWasted = Math.max(wastedMs, maxWasted);
-            results.push({
-              url: request.url,
-              wastedMs: Util.formatMilliseconds(wastedMs),
-            });
-          }
+          urls.add(networkRecord._url);
        }
-      });
+      }

+      const {results, wastedMs} = UsesRelPreloadAudit.computeWasteWithGraph(urls, graph, simulator,
+        mainResource);
      // sort results by wastedTime DESC
      results.sort((a, b) => b.wastedMs - a.wastedMs);

      const headings = [
        {key: 'url', itemType: 'url', text: 'URL'},
-        {key: 'wastedMs', itemType: 'text', text: 'Potential Savings'},
+        {key: 'wastedMs', itemType: 'ms', text: 'Potential Savings', granularity: 10},
      ];
-      const summary = {wastedMs: maxWasted};
+      const summary = {wastedMs};
      const details = Audit.makeTableDetails(headings, results, summary);

      return {
-        score: UnusedBytes.scoreForWastedMs(maxWasted),
-        rawValue: maxWasted,
-        displayValue: Util.formatMilliseconds(maxWasted),
+        score: UnusedBytes.scoreForWastedMs(wastedMs),
+        rawValue: wastedMs,
+        displayValue: Util.formatMilliseconds(wastedMs),
        extendedInfo: {
          value: results,
        },
@@ -82,15 +82,21 @@ module.exports = class ConnectionPool {
        throw new Error(`Could not find a connection for origin: ${origin}`);
      }

+      // Make sure each origin has 6 connections available
+      // https://cs.chromium.org/chromium/src/net/socket/client_socket_pool_manager.cc?type=cs&q="int+g_max_sockets_per_group"
+      while (connections.length < 6) connections.push(connections[0].clone());
Review comment: pull out into a file level constant?
Reply: done
+      while (connections.length > 6) connections.pop();
Review comment: how would we end up with >6? our network records indicate we have >6 even though that should be impossible?
Reply: If Chrome killed connections and needed to reopen them, we can kill this line though
      this._connectionsByOrigin.set(origin, connections);
    }
  }
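The "pull out into a file level constant?" suggestion above was answered with "done". A minimal sketch of what that refactor might look like, assuming a hypothetical constant name (the merged code may differ, and per the thread the `> 6` trimming line may simply be dropped):

```js
// Chrome opens at most 6 simultaneous connections per origin:
// https://cs.chromium.org/chromium/src/net/socket/client_socket_pool_manager.cc?type=cs&q="int+g_max_sockets_per_group"
const CONNECTIONS_PER_ORIGIN = 6; // hypothetical name for the file-level constant

/**
 * Pads an origin's connection list so the simulator always has the per-origin
 * maximum available, cloning the first observed connection as a template.
 * @param {Array<{clone: function(): Object}>} connections
 */
function padConnectionsForOrigin(connections) {
  while (connections.length < CONNECTIONS_PER_ORIGIN) {
    connections.push(connections[0].clone());
  }
  return connections;
}
```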
Review comment: can we add a few lines to describe what's happening in here?
Reply: done

  /**
   * @param {LH.WebInspector.NetworkRequest} record
+   * @param {{ignoreObserved?: boolean}} options
   * @return {?TcpConnection}
   */
-  acquire(record) {
+  acquire(record, options = {}) {
    if (this._connectionsByRecord.has(record)) {
      // @ts-ignore
      return this._connectionsByRecord.get(record);
@@ -99,16 +105,29 @@
    const origin = String(record.origin);
    /** @type {TcpConnection[]} */
    const connections = this._connectionsByOrigin.get(origin) || [];
-    const wasConnectionWarm = !!this._connectionReusedByRequestId.get(record.requestId);
-    const connection = connections.find(connection => {
-      const meetsWarmRequirement = wasConnectionWarm === connection.isWarm();
-      return meetsWarmRequirement && !this._connectionsInUse.has(connection);
+    // Sort connections by decreasing congestion window, i.e. warmest to coldest
+    const sortedConnections = connections.sort((a, b) => b.congestionWindow - a.congestionWindow);
+
+    const availableWarmConnection = sortedConnections.find(connection => {
+      return connection.isWarm() && !this._connectionsInUse.has(connection);
    });
+    const availableColdConnection = sortedConnections.find(connection => {
+      return !connection.isWarm() && !this._connectionsInUse.has(connection);
+    });

-    if (!connection) return null;
-    this._connectionsInUse.add(connection);
-    this._connectionsByRecord.set(record, connection);
-    return connection;
+    const needsColdConnection = !options.ignoreObserved &&
+      !this._connectionReusedByRequestId.get(record.requestId);
Review comment: meta point: it's really funky to be mixing this "connection reused" state that refers to observed network activity with what's "available" and "needed" for the simulation. I wish these were clearer
+    const needsWarmConnection = !options.ignoreObserved &&
+      this._connectionReusedByRequestId.get(record.requestId);

+    let connectionToUse = availableWarmConnection || availableColdConnection;
Review comment: still not sure exactly what logic you're trying to apply here, but it does look odd there's no
+    if (needsColdConnection) connectionToUse = availableColdConnection;
+    if (needsWarmConnection) connectionToUse = availableWarmConnection;
+    if (!connectionToUse) return null;

+    this._connectionsInUse.add(connectionToUse);
+    this._connectionsByRecord.set(record, connectionToUse);
+    return connectionToUse;
  }

  /**
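To restate the selection logic being questioned above: when the simulation may ignore observed behavior it simply takes the warmest available connection; otherwise it insists on a connection whose warm/cold state matches whether the request was observed reusing a connection, even if that means returning null and waiting. A standalone sketch of that decision (function and parameter names here are illustrative, not the ConnectionPool API):

```js
/**
 * Picks a connection the way acquire() does in this PR: prefer the warmest available
 * connection, but when replaying observed behavior, require the connection's warm/cold
 * state to match whether the request actually reused a connection in the trace.
 * @param {{isWarm: function(): boolean}|undefined} availableWarmConnection
 * @param {{isWarm: function(): boolean}|undefined} availableColdConnection
 * @param {boolean} wasObservedReused whether the request reused a connection in the trace
 * @param {boolean} ignoreObserved whether to simulate an idealized load instead
 */
function chooseConnection(availableWarmConnection, availableColdConnection,
    wasObservedReused, ignoreObserved) {
  // Idealized mode: take whatever is warmest.
  let connectionToUse = availableWarmConnection || availableColdConnection;

  // Observed mode: force the match, even if that means returning nothing
  // and making the request wait for the right kind of connection.
  if (!ignoreObserved) {
    connectionToUse = wasObservedReused ? availableWarmConnection : availableColdConnection;
  }

  return connectionToUse || null;
}
```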
@@ -52,6 +52,7 @@ class Simulator {
    this._layoutTaskMultiplier = this._cpuSlowdownMultiplier * this._options.layoutTaskMultiplier;

    // Properties reset on every `.simulate` call but duplicated here for type checking
+    this._ignoreObserved = false;
    this._nodeTimings = new Map();
    this._numberInProgressByType = new Map();
    this._nodes = {};
@@ -147,6 +148,16 @@
    }
  }

+  /**
+   * @param {LH.WebInspector.NetworkRequest} record
+   * @return {?TcpConnection}
+   */
+  _acquireConnection(record) {
+    return this._connectionPool.acquire(record, {
+      ignoreObserved: this._ignoreObserved,
+    });
+  }

  /**
   * @param {Node} node
   * @param {number} totalElapsedTime
@@ -167,7 +178,7 @@
    // Start a network request if we're not at max requests and a connection is available
    const numberOfActiveRequests = this._numberInProgress(node.type);
    if (numberOfActiveRequests >= this._maximumConcurrentRequests) return;
-    const connection = this._connectionPool.acquire(/** @type {NetworkNode} */ (node).record);
+    const connection = this._acquireConnection(/** @type {NetworkNode} */ (node).record);
    if (!connection) return;

    this._markNodeAsInProgress(node, totalElapsedTime);
@@ -212,7 +223,7 @@

    const record = /** @type {NetworkNode} */ (node).record;
    const timingData = this._nodeTimings.get(node);
-    const connection = /** @type {TcpConnection} */ (this._connectionPool.acquire(record));
+    const connection = /** @type {TcpConnection} */ (this._acquireConnection(record));
Review comment: how is this known not-null at this point?
Reply: commented
    const calculation = connection.simulateDownloadUntil(
      record.transferSize - timingData.bytesDownloaded,
      {timeAlreadyElapsed: timingData.timeElapsed, maximumTimeToElapse: Infinity}
@@ -255,7 +266,7 @@
    if (node.type !== Node.TYPES.NETWORK) throw new Error('Unsupported');

    const record = /** @type {NetworkNode} */ (node).record;
-    const connection = /** @type {TcpConnection} */ (this._connectionPool.acquire(record));
+    const connection = /** @type {TcpConnection} */ (this._acquireConnection(record));
Review comment: also never null?
Reply: commented
    const calculation = connection.simulateDownloadUntil(
      record.transferSize - timingData.bytesDownloaded,
      {
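On the two "never null?" questions (both answered with "commented"): these call sites run only for requests that are already in progress, and acquire() short-circuits through _connectionsByRecord for a record that was already assigned a connection when it started, so the TcpConnection cast should never observe null. A small sketch of an assertion that could document that invariant instead of a bare cast (the helper name is hypothetical, not part of this PR):

```js
/**
 * Once a request is in progress, ConnectionPool.acquire() returns the connection stored
 * in _connectionsByRecord at start time, so re-acquiring for an in-flight record never
 * yields null. This helper makes the invariant explicit.
 * @template T
 * @param {T|null|undefined} connection
 * @return {T}
 */
function assertConnectionAcquired(connection) {
  if (!connection) {
    throw new Error('Connection should have been acquired when the request started');
  }
  return connection;
}

// e.g. const connection = assertConnectionAcquired(this._acquireConnection(record));
```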
@@ -281,10 +292,13 @@
  /**
   * Estimates the time taken to process all of the graph's nodes.
   * @param {Node} graph
+   * @param {{ignoreObserved?: boolean}=} options
   * @return {LH.Gatherer.Simulation.Result}
   */
-  simulate(graph) {
+  simulate(graph, options) {
+    options = Object.assign({ignoreObserved: false}, options);
    // initialize the necessary data containers
+    this._ignoreObserved = options.ignoreObserved;
Review comment: I'm really not sure what to call this thing, basically it's "don't try and stick just to how the page loaded in the trace, feel free to load it as efficiently as you think possible"
Reply: yeah i don't love this term. :) as you know i don't love reusing it in different places in diff contexts. is
    this._initializeConnectionPool(graph);
    this._initializeAuxiliaryData();
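Whatever the flag ends up being called, its effect is easiest to see from the caller's side: the preload audit above simulates the same graph twice with ignoreObserved: true, once untouched and once with preload candidates re-parented onto the main document, and diffs per-node end times. A condensed sketch of that pattern (parameter names are assumed; the nodeTimings/endTime shapes are taken from the audit hunk above):

```js
/**
 * Sketch of the usage pattern from the preload audit: run the same simulator over an
 * unmodified and a modified graph in "idealized" mode, then diff a node's endTime.
 * @return {number} estimated savings in milliseconds
 */
function estimateSavingsMs(simulator, graph, modifiedGraph, originalNode, modifiedNode) {
  // Let the simulator load the page as efficiently as it can, rather than
  // replaying exactly the connection behavior observed in the trace.
  const before = simulator.simulate(graph, {ignoreObserved: true});
  const after = simulator.simulate(modifiedGraph, {ignoreObserved: true});
  return Math.round(
    before.nodeTimings.get(originalNode).endTime - after.nodeTimings.get(modifiedNode).endTime
  );
}
```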
@@ -78,6 +78,20 @@ class TcpConnection {
    return this._warmed;
  }

+  /**
+   * @return {boolean}
+   */
+  isH2() {
+    return this._h2;
+  }

+  /**
+   * @return {number}
+   */
+  get congestionWindow() {
+    return this._congestionWindow;
+  }

  /**
   * Sets the number of excess bytes that are available to this connection on future downloads, only
   * applies to H2 connections.
@@ -88,6 +102,14 @@
    this._h2OverflowBytesDownloaded = bytes;
  }

+  /**
+   * @return {TcpConnection}
+   */
+  clone() {
+    // @ts-ignore
+    return Object.assign(new TcpConnection(), this);
Review comment: I'd probably personally prefer the long way of setting these, but if you prefer this way,
Reply: done 👍
+  }

  /**
   * Simulates a network download of a particular number of bytes over an optional maximum amount of time
   * and returns information about the ending state.
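The reviewer above preferred "the long way of setting these" over Object.assign, and the author marked it done. One possible shape of that, limited to the fields visible in this diff; the constructor arguments and any remaining private state are assumptions, so the merged version likely copies more:

```js
/**
 * Sketch only: an explicit clone() using fields that appear in this diff
 * (_warmed, _h2, _congestionWindow, _h2OverflowBytesDownloaded).
 * @return {TcpConnection}
 */
clone() {
  // Constructor arguments are assumed here; the real signature may differ.
  const connection = new TcpConnection(this._rtt, this._throughput);
  connection._warmed = this._warmed;
  connection._h2 = this._h2;
  connection._congestionWindow = this._congestionWindow;
  connection._h2OverflowBytesDownloaded = this._h2OverflowBytesDownloaded;
  return connection;
}
```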
Review comment: seems like it gets most of the way to an opportunity but then stops here? Is measuring impact on TTI or whatever problematic?
Reply: Yeah fair, I was mostly retaining parity with the old way, but we can do max(TTI, load) like the others 👍
Follow-up: having had the hour to think about it, I've reconvinced myself that this is a bad idea and we should keep it to max(...wastedMs); our selection of preloaded items is fairly narrow, and in a busy page it will rarely impact the longest chain. Given our super ambiguous no-longer-just-on-a-particular-metric approach, it seems OK for now.
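For concreteness, the two summary strategies discussed here differ roughly as follows; the first line mirrors the code in this PR, while the second is a sketch of the rejected alternative with assumed variable names and result shape:

```js
// Kept in this PR: report the single largest per-resource improvement.
const wastedMs = Math.max(...results.map(item => item.wastedMs));

// Rejected alternative: diff whole-page simulations and take the larger of the
// improvements to interactive and load. For preload this rarely moves, because the
// preloaded requests are seldom on the longest chain of a busy page.
const wastedMsAlternative = Math.max(
  beforeInteractive.timeInMs - afterInteractive.timeInMs,
  beforeLoad.timeInMs - afterLoad.timeInMs
);
```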