diff --git a/core/audits/dobetterweb/uses-http2.js b/core/audits/dobetterweb/uses-http2.js
index 1102c0aa1fa8..81ef8d474278 100644
--- a/core/audits/dobetterweb/uses-http2.js
+++ b/core/audits/dobetterweb/uses-http2.js
@@ -152,16 +152,19 @@ class UsesHTTP2Audit extends Audit {
    * @param {LH.Artifacts.EntityClassification} classifiedEntities
    * @return {boolean}
    */
-  static isStaticAsset(networkRequest, classifiedEntities) {
+  static isMultiplexableStaticAsset(networkRequest, classifiedEntities) {
     if (!STATIC_RESOURCE_TYPES.has(networkRequest.resourceType)) return false;
 
     // Resources from third-parties that are less than 100 bytes are usually tracking pixels, not actual resources.
     // They can masquerade as static types though (gifs, documents, etc)
     if (networkRequest.resourceSize < 100) {
-      // This logic needs to be revisited.
-      // See https://github.com/GoogleChrome/lighthouse/issues/14661
       const entity = classifiedEntities.entityByUrl.get(networkRequest.url);
-      if (entity && !entity.isUnrecognized) return false;
+      if (entity) {
+        // Third-party assets are multiplexable in their first-party context.
+        if (classifiedEntities.firstParty?.name === entity.name) return true;
+        // Skip recognizable third-parties' requests.
+        if (!entity.isUnrecognized) return false;
+      }
     }
 
     return true;
@@ -199,7 +202,7 @@ class UsesHTTP2Audit extends Audit {
     /** @type {Map<string, Array<LH.Artifacts.NetworkRequest>>} */
     const groupedByOrigin = new Map();
     for (const record of networkRecords) {
-      if (!UsesHTTP2Audit.isStaticAsset(record, classifiedEntities)) continue;
+      if (!UsesHTTP2Audit.isMultiplexableStaticAsset(record, classifiedEntities)) continue;
       if (UrlUtils.isLikeLocalhost(record.parsedURL.host)) continue;
       const existing = groupedByOrigin.get(record.parsedURL.securityOrigin) || [];
       existing.push(record);
diff --git a/core/test/audits/dobetterweb/uses-http2-test.js b/core/test/audits/dobetterweb/uses-http2-test.js
index d465fe410c34..aeba19f9b288 100644
--- a/core/test/audits/dobetterweb/uses-http2-test.js
+++ b/core/test/audits/dobetterweb/uses-http2-test.js
@@ -265,4 +265,58 @@ describe('Resources are fetched over http/2', () => {
     expect(result.score).toEqual(0);
     expect(result.metricSavings).toBeUndefined();
   });
+
+  it('should identify multiplexable assets when run on recognizable 3p origins', async () => {
+    const networkRecords = [
+      {
+        url: 'https://www.twitter.com/',
+        priority: 'High',
+        protocol: 'HTTP/1.1',
+      },
+      {
+        url: 'https://www.twitter.com/2',
+        priority: 'High',
+        protocol: 'HTTP/1.1',
+      },
+      {
+        url: 'https://www.twitter.com/3',
+        priority: 'High',
+        protocol: 'HTTP/1.1',
+      },
+      {
+        url: 'https://www.twitter.com/4',
+        priority: 'High',
+        protocol: 'HTTP/1.1',
+      },
+      {
+        url: 'https://www.twitter.com/5',
+        priority: 'High',
+        protocol: 'HTTP/1.1',
+      },
+      {
+        url: 'https://www.twitter.com/embed/foo',
+        priority: 'High',
+        protocol: 'HTTP/1.1',
+      },
+      {
+        url: 'https://www.facebook.com/embed',
+        protocol: 'HTTP/1.1',
+        priority: 'High',
+      },
+    ];
+    const artifacts = buildArtifacts(networkRecords);
+    artifacts.devtoolsLogs.defaultPass = networkRecordsToDevtoolsLog(networkRecords);
+
+    const result = await UsesHTTP2Audit.audit(artifacts, context);
+    const urls = new Set(result.details.items.map(item => item.url));
+    const hosts = new Set(result.details.items.map(item => new URL(item.url).host));
+
+    // Make sure we don't pull in actual 3p domains.
+    expect(hosts).toEqual(new Set(['www.twitter.com']));
+
+    // Make sure we don't flag the 3rd party request for multiplexing.
+    expect(urls).not.toContain('https://www.facebook.com/embed');
+
+    expect(result.details.items).toHaveLength(6);
+  });
 });
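For reviewers skimming the diff, the sketch below mirrors the decision flow that `isMultiplexableStaticAsset` now applies to sub-100-byte responses: a tiny asset attributed to the page's own first-party entity stays eligible for the HTTP/2 recommendation, while a tiny asset from a recognized third party is still treated as a tracking pixel and skipped. This is a standalone illustration, not Lighthouse code; the simplified record/entity shapes, the `isMultiplexableSketch` name, and the contents of the static-types set are assumptions made for the example.

```js
/**
 * Minimal sketch of the new classification logic for small responses.
 * @param {{resourceType: string, resourceSize: number, url: string}} request
 * @param {{firstParty?: {name: string}, entityByUrl: Map<string, {name: string, isUnrecognized?: boolean}>}} classification
 * @param {Set<string>} staticTypes
 * @return {boolean}
 */
function isMultiplexableSketch(request, classification, staticTypes) {
  if (!staticTypes.has(request.resourceType)) return false;

  if (request.resourceSize < 100) {
    const entity = classification.entityByUrl.get(request.url);
    if (entity) {
      // A tiny asset owned by the page's own (first-party) entity still counts as multiplexable.
      if (classification.firstParty?.name === entity.name) return true;
      // A tiny asset from a recognized third party is treated as a tracking pixel and skipped.
      if (!entity.isUnrecognized) return false;
    }
  }
  return true;
}

// Example: a 50-byte image from a recognized third party is skipped,
// but the same image served by the first-party entity is still flagged.
const classification = {
  firstParty: {name: 'Twitter'},
  entityByUrl: new Map([
    ['https://www.facebook.com/embed', {name: 'Facebook', isUnrecognized: false}],
    ['https://www.twitter.com/embed/foo', {name: 'Twitter', isUnrecognized: false}],
  ]),
};
const staticTypes = new Set(['Image', 'Script', 'Stylesheet', 'Document', 'Font']);

console.log(isMultiplexableSketch(
  {resourceType: 'Image', resourceSize: 50, url: 'https://www.facebook.com/embed'},
  classification, staticTypes)); // false: recognized third party
console.log(isMultiplexableSketch(
  {resourceType: 'Image', resourceSize: 50, url: 'https://www.twitter.com/embed/foo'},
  classification, staticTypes)); // true: same entity as the first party
```

Run with Node, the two calls print `false` then `true`, which parallels the new test's expectation that the `www.facebook.com/embed` request is not flagged while the `www.twitter.com` requests are.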