From 58849a27d54ce7e0873a1fd28eaa4dc6f4231e01 Mon Sep 17 00:00:00 2001
From: afenton90
Date: Mon, 5 Aug 2019 15:44:46 +0100
Subject: [PATCH] feat(build): builds use a chunk group hash

webpackCompilationHash has been replaced with a generated named chunk
group hash. This reduces the blast radius of changed built files
between builds
---
 packages/gatsby/src/commands/build.js         | 37 ++++++++++++++++++-
 packages/gatsby/src/query/query-runner.js     |  4 +-
 .../__tests__/__snapshots__/index.js.snap     |  1 +
 packages/gatsby/src/redux/index.js            |  1 +
 packages/gatsby/src/redux/reducers/index.js   |  1 +
 .../reducers/webpack-chunk-group-hashes.js    |  8 ++++
 .../utils/gatsby-webpack-stats-extractor.js   |  3 +-
 packages/gatsby/src/utils/page-data.js        |  9 ++---
 packages/gatsby/src/utils/worker/page-data.js | 12 ++----
 9 files changed, 58 insertions(+), 18 deletions(-)
 create mode 100644 packages/gatsby/src/redux/reducers/webpack-chunk-group-hashes.js

diff --git a/packages/gatsby/src/commands/build.js b/packages/gatsby/src/commands/build.js
index adc5ad88199a0..1b5884bdca9b5 100644
--- a/packages/gatsby/src/commands/build.js
+++ b/packages/gatsby/src/commands/build.js
@@ -102,6 +102,41 @@ module.exports = async function build(program: BuildArgs) {
   })
   activity.start()
 
+  const { namedChunkGroups, chunks } = stats.toJson({
+    all: false,
+    chunkGroups: true,
+    chunks: true,
+  })
+
+  // Create a chunkGroupHash per namedChunkGroups entry
+
+  const getHashForChunk = chunkId => {
+    const chunk = chunks.find(({ id }) => id === chunkId)
+    return chunk && chunk.hash
+  }
+  const getHashForNamedChunk = ({ chunks }) =>
+    chunks.reduce(
+      (namedChunkHash, chunkId) =>
+        `${namedChunkHash}${getHashForChunk(chunkId)}`,
+      ``
+    )
+
+  // Get app chunk hash as that will be needed for all other named chunk groups
+
+  // Construct hashes for remaining named chunk groups
+  const namedChunkHashes = Object.keys(namedChunkGroups).reduce(
+    (groupHashes, key) => {
+      groupHashes[key] = `${getHashForNamedChunk(namedChunkGroups[key])}`
+      return groupHashes
+    },
+    {}
+  )
+
+  store.dispatch({
+    type: `SET_WEBPACK_CHUNK_GROUP_HASHES`,
+    payload: namedChunkHashes,
+  })
+
   // We need to update all page-data.json files with the new
   // compilation hash. As a performance optimization however, we
   // don't update the files for `pageQueryIds` (dirty queries),
@@ -113,7 +148,7 @@ module.exports = async function build(program: BuildArgs) {
   await pageDataUtil.updateCompilationHashes(
     { publicDir, workerPool },
     cleanPagePaths,
-    webpackCompilationHash
+    namedChunkHashes
   )
 
   activity.end()
diff --git a/packages/gatsby/src/query/query-runner.js b/packages/gatsby/src/query/query-runner.js
index 958f603a95700..d38390bd988ea 100644
--- a/packages/gatsby/src/query/query-runner.js
+++ b/packages/gatsby/src/query/query-runner.js
@@ -28,7 +28,7 @@ module.exports = async (queryJob: QueryJob) => {
     schema,
     schemaCustomization,
     program,
-    webpackCompilationHash,
+    webpackChunkGroupHashes,
   } = store.getState()
 
   const graphql = (query, context) =>
@@ -105,7 +105,7 @@ ${formatErrorDetails(errorDetails)}`)
       { publicDir },
       page,
       result,
-      webpackCompilationHash
+      webpackChunkGroupHashes[page.componentChunkName]
     )
   } else {
     // The babel plugin is hard-coded to load static queries from
diff --git a/packages/gatsby/src/redux/__tests__/__snapshots__/index.js.snap b/packages/gatsby/src/redux/__tests__/__snapshots__/index.js.snap
index f48660c52827e..53e3591d9fd4f 100644
--- a/packages/gatsby/src/redux/__tests__/__snapshots__/index.js.snap
+++ b/packages/gatsby/src/redux/__tests__/__snapshots__/index.js.snap
@@ -20,6 +20,7 @@ Object {
   "status": Object {
     "plugins": Object {},
   },
+  "webpackChunkGroupHashes": "",
   "webpackCompilationHash": "",
 }
 `;
diff --git a/packages/gatsby/src/redux/index.js b/packages/gatsby/src/redux/index.js
index 455a5c666b9ac..6c0f4714412fe 100644
--- a/packages/gatsby/src/redux/index.js
+++ b/packages/gatsby/src/redux/index.js
@@ -60,6 +60,7 @@ const saveState = () => {
     `components`,
     `staticQueryComponents`,
     `webpackCompilationHash`,
+    `webpackChunkGroupHashes`,
   ])
 
   return writeToCache(pickedState)
diff --git a/packages/gatsby/src/redux/reducers/index.js b/packages/gatsby/src/redux/reducers/index.js
index 53487ba8e9e8e..c071b70a7e2e8 100644
--- a/packages/gatsby/src/redux/reducers/index.js
+++ b/packages/gatsby/src/redux/reducers/index.js
@@ -38,6 +38,7 @@ module.exports = {
   jobs: require(`./jobs`),
   webpack: require(`./webpack`),
   webpackCompilationHash: require(`./webpack-compilation-hash`),
+  webpackChunkGroupHashes: require(`./webpack-chunk-group-hashes`),
   redirects: require(`./redirects`),
   babelrc: require(`./babelrc`),
   schemaCustomization: require(`./schema-customization`),
diff --git a/packages/gatsby/src/redux/reducers/webpack-chunk-group-hashes.js b/packages/gatsby/src/redux/reducers/webpack-chunk-group-hashes.js
new file mode 100644
index 0000000000000..e9d7e70e78848
--- /dev/null
+++ b/packages/gatsby/src/redux/reducers/webpack-chunk-group-hashes.js
@@ -0,0 +1,8 @@
+module.exports = (state = ``, action) => {
+  switch (action.type) {
+    case `SET_WEBPACK_CHUNK_GROUP_HASHES`:
+      return action.payload
+    default:
+      return state
+  }
+}
diff --git a/packages/gatsby/src/utils/gatsby-webpack-stats-extractor.js b/packages/gatsby/src/utils/gatsby-webpack-stats-extractor.js
index 4c0186cf8bfdd..0293dd36115cd 100644
--- a/packages/gatsby/src/utils/gatsby-webpack-stats-extractor.js
+++ b/packages/gatsby/src/utils/gatsby-webpack-stats-extractor.js
@@ -27,9 +27,10 @@ class GatsbyWebpackStatsExtractor {
         }
       }
       const webpackStats = {
-        ...stats.toJson({ all: false, chunkGroups: true }),
+        ...stats.toJson({ all: false, chunkGroups: true, chunks: true }),
         assetsByChunkName: assets,
       }
+
       fs.writeFile(
         path.join(`public`, `chunk-map.json`),
         JSON.stringify(assetsMap),
diff --git a/packages/gatsby/src/utils/page-data.js b/packages/gatsby/src/utils/page-data.js
index a435c62182b3c..0a2f93624d0a1 100644
--- a/packages/gatsby/src/utils/page-data.js
+++ b/packages/gatsby/src/utils/page-data.js
@@ -28,15 +28,12 @@ const write = async ({ publicDir }, page, result, webpackCompilationHash) => {
 const updateCompilationHashes = (
   { publicDir, workerPool },
   pagePaths,
-  webpackCompilationHash
+  namedChunkHashes
 ) => {
   const segments = chunk(pagePaths, 50)
+  console.log(`Passed namedChunkHashes:: `, JSON.stringify(namedChunkHashes))
   return Promise.map(segments, segment =>
-    workerPool.updateCompilationHashes(
-      { publicDir },
-      segment,
-      webpackCompilationHash
-    )
+    workerPool.updateCompilationHashes({ publicDir }, segment, namedChunkHashes)
   )
 }
 
diff --git a/packages/gatsby/src/utils/worker/page-data.js b/packages/gatsby/src/utils/worker/page-data.js
index 4fca4b4b1c9e2..0746f2e95a0e2 100644
--- a/packages/gatsby/src/utils/worker/page-data.js
+++ b/packages/gatsby/src/utils/worker/page-data.js
@@ -7,22 +7,18 @@ const getFilePath = ({ publicDir }, pagePath) => {
   return path.join(publicDir, `page-data`, fixedPagePath, `page-data.json`)
 }
 
-const updateJsonFileField = async (filename, fieldname, value) => {
+const updateJsonFileField = async (filename, fieldname, namedChunkHashes) => {
   const object = JSON.parse(await fs.readFile(filename, `utf-8`))
-  object[fieldname] = value
+  object[fieldname] = namedChunkHashes[object.componentChunkName]
   await fs.outputFile(filename, JSON.stringify(object), `utf-8`)
 }
 
-const updateCompilationHashes = (
-  { publicDir },
-  pagePaths,
-  webpackCompilationHash
-) =>
+const updateCompilationHashes = ({ publicDir }, pagePaths, namedChunkHashes) =>
   Promise.map(pagePaths, pagePath =>
     updateJsonFileField(
       getFilePath({ publicDir }, pagePath),
       `webpackCompilationHash`,
-      webpackCompilationHash
+      namedChunkHashes
     )
   )
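
Note (illustrative sketch, not part of the patch): the build hunk above derives
one hash per webpack named chunk group by concatenating the hashes of that
group's chunks, and the page-data hunks then write only the hash of a page's
own componentChunkName group into its page-data.json. The standalone Node
sketch below walks through that flow under assumed data; the chunk group names,
ids, and hashes are made up, and real values would come from stats.toJson().

    // sketch.js — hypothetical example data standing in for stats.toJson() output
    const statsJson = {
      chunks: [
        { id: 0, hash: `aaa111` },
        { id: 1, hash: `bbb222` },
      ],
      namedChunkGroups: {
        app: { chunks: [0] },
        "component---src-pages-index-js": { chunks: [0, 1] },
      },
    }

    // Hash of a single chunk, looked up by id.
    const getHashForChunk = chunkId => {
      const chunk = statsJson.chunks.find(({ id }) => id === chunkId)
      return chunk && chunk.hash
    }

    // One concatenated hash per named chunk group (what the patch stores in redux
    // via SET_WEBPACK_CHUNK_GROUP_HASHES).
    const namedChunkHashes = Object.keys(statsJson.namedChunkGroups).reduce(
      (groupHashes, key) => {
        groupHashes[key] = statsJson.namedChunkGroups[key].chunks
          .map(getHashForChunk)
          .join(``)
        return groupHashes
      },
      {}
    )

    // A page-data.json entry picks up only its own group's hash, so a chunk change
    // invalidates just the pages whose chunk group actually contains that chunk.
    const pageData = { componentChunkName: `component---src-pages-index-js` }
    pageData.webpackCompilationHash = namedChunkHashes[pageData.componentChunkName]
    console.log(pageData.webpackCompilationHash) // => aaa111bbb222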