Skip to content

Commit

Permalink
feat(build): builds use a chunk group hash
Browse files Browse the repository at this point in the history
webpackCompilationHash has been replaced with a generated named chunk group hash. This reduces the
blast radius of changed built files between builds.
  • Loading branch information
afenton90 committed Aug 5, 2019
1 parent f38b40d commit 58849a2
Show file tree
Hide file tree
Showing 9 changed files with 58 additions and 18 deletions.
37 changes: 36 additions & 1 deletion packages/gatsby/src/commands/build.js
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,41 @@ module.exports = async function build(program: BuildArgs) {
})
activity.start()

const { namedChunkGroups, chunks } = stats.toJson({
all: false,
chunkGroups: true,
chunks: true,
})

// Create a chunkGroupHash per namedChunkGroups entry

// Look up the webpack chunk with the given id and return its hash.
// Yields `undefined` when no chunk with that id exists in `chunks`.
const getHashForChunk = chunkId => {
  for (const candidate of chunks) {
    if (candidate.id === chunkId) {
      return candidate.hash
    }
  }
  return undefined
}
// Build a combined hash for a named chunk group by concatenating the
// hash of every chunk it references, in order.
const getHashForNamedChunk = ({ chunks }) => {
  let combined = ``
  for (const chunkId of chunks) {
    combined += `${getHashForChunk(chunkId)}`
  }
  return combined
}

// Construct a hash for each named chunk group by concatenating the
// hashes of the chunks it contains
const namedChunkHashes = Object.keys(namedChunkGroups).reduce(
(groupHashes, key) => {
groupHashes[key] = `${getHashForNamedChunk(namedChunkGroups[key])}`
return groupHashes
},
{}
)

store.dispatch({
type: `SET_WEBPACK_CHUNK_GROUP_HASHES`,
payload: namedChunkHashes,
})

// We need to update all page-data.json files with the new
// compilation hash. As a performance optimization however, we
// don't update the files for `pageQueryIds` (dirty queries),
Expand All @@ -113,7 +148,7 @@ module.exports = async function build(program: BuildArgs) {
await pageDataUtil.updateCompilationHashes(
{ publicDir, workerPool },
cleanPagePaths,
webpackCompilationHash
namedChunkHashes
)

activity.end()
Expand Down
4 changes: 2 additions & 2 deletions packages/gatsby/src/query/query-runner.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ module.exports = async (queryJob: QueryJob) => {
schema,
schemaCustomization,
program,
webpackCompilationHash,
webpackChunkGroupHashes,
} = store.getState()

const graphql = (query, context) =>
Expand Down Expand Up @@ -105,7 +105,7 @@ ${formatErrorDetails(errorDetails)}`)
{ publicDir },
page,
result,
webpackCompilationHash
webpackChunkGroupHashes[page.componentChunkName]
)
} else {
// The babel plugin is hard-coded to load static queries from
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ Object {
"status": Object {
"plugins": Object {},
},
"webpackChunkGroupHashes": "",
"webpackCompilationHash": "",
}
`;
1 change: 1 addition & 0 deletions packages/gatsby/src/redux/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ const saveState = () => {
`components`,
`staticQueryComponents`,
`webpackCompilationHash`,
`webpackChunkGroupHashes`,
])

return writeToCache(pickedState)
Expand Down
1 change: 1 addition & 0 deletions packages/gatsby/src/redux/reducers/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ module.exports = {
jobs: require(`./jobs`),
webpack: require(`./webpack`),
webpackCompilationHash: require(`./webpack-compilation-hash`),
webpackChunkGroupHashes: require(`./webpack-chunk-group-hashes`),
redirects: require(`./redirects`),
babelrc: require(`./babelrc`),
schemaCustomization: require(`./schema-customization`),
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
// Reducer holding the per-named-chunk-group webpack hashes, keyed by
// chunk group name (e.g. a page's componentChunkName).
//
// Fix: the initial state was the empty string ``, but the
// SET_WEBPACK_CHUNK_GROUP_HASHES payload is an object map — use an
// empty object so consumers can always index it by chunk name.
// NOTE(review): the redux state snapshot expecting "" for
// webpackChunkGroupHashes must be updated to {} accordingly.
const webpackChunkGroupHashesReducer = (state = {}, action) => {
  switch (action.type) {
    case `SET_WEBPACK_CHUNK_GROUP_HASHES`:
      // Replace the map wholesale — each build computes the full set.
      return action.payload
    default:
      return state
  }
}

module.exports = webpackChunkGroupHashesReducer
3 changes: 2 additions & 1 deletion packages/gatsby/src/utils/gatsby-webpack-stats-extractor.js
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,10 @@ class GatsbyWebpackStatsExtractor {
}
}
const webpackStats = {
...stats.toJson({ all: false, chunkGroups: true }),
...stats.toJson({ all: false, chunkGroups: true, chunks: true }),
assetsByChunkName: assets,
}

fs.writeFile(
path.join(`public`, `chunk-map.json`),
JSON.stringify(assetsMap),
Expand Down
9 changes: 3 additions & 6 deletions packages/gatsby/src/utils/page-data.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,15 +28,12 @@ const write = async ({ publicDir }, page, result, webpackCompilationHash) => {
// Update the `webpackCompilationHash` field of the page-data.json file
// for every path in `pagePaths`, farming the work out to the worker
// pool in batches of 50 paths.
//
// `namedChunkHashes` maps chunk group name -> concatenated chunk hash;
// each worker resolves the hash matching a page's componentChunkName.
// Fix: removed a leftover debug console.log that dumped the entire
// hash map (potentially very large JSON) to stdout on every build.
const updateCompilationHashes = (
  { publicDir, workerPool },
  pagePaths,
  namedChunkHashes
) => {
  const segments = chunk(pagePaths, 50)
  return Promise.map(segments, segment =>
    workerPool.updateCompilationHashes({ publicDir }, segment, namedChunkHashes)
  )
}

Expand Down
12 changes: 4 additions & 8 deletions packages/gatsby/src/utils/worker/page-data.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,22 +7,18 @@ const getFilePath = ({ publicDir }, pagePath) => {
return path.join(publicDir, `page-data`, fixedPagePath, `page-data.json`)
}

// Read a JSON file, set `fieldname` to the hash registered for the
// file's componentChunkName, and write it back in place.
//
// Fix: guard the lookup — when `namedChunkHashes` has no entry for the
// chunk, the old assignment stored `undefined`, and JSON.stringify
// silently drops undefined values, stripping the field from
// page-data.json entirely. Keep the existing field untouched instead.
const updateJsonFileField = async (filename, fieldname, namedChunkHashes) => {
  const object = JSON.parse(await fs.readFile(filename, `utf-8`))
  const hash = namedChunkHashes[object.componentChunkName]
  if (hash !== undefined) {
    object[fieldname] = hash
  }
  await fs.outputFile(filename, JSON.stringify(object), `utf-8`)
}

// Per-worker entry point: rewrite the compilation hash in every
// page-data.json under `pagePaths`, one update per path in parallel.
//
// The JSON field keeps its historical name `webpackCompilationHash`
// for backward compatibility with existing page-data consumers, but
// the value written is now the page-specific named-chunk-group hash
// resolved by updateJsonFileField from `namedChunkHashes`.
const updateCompilationHashes = ({ publicDir }, pagePaths, namedChunkHashes) =>
  Promise.map(pagePaths, pagePath =>
    updateJsonFileField(
      getFilePath({ publicDir }, pagePath),
      `webpackCompilationHash`,
      namedChunkHashes
    )
  )

Expand Down

0 comments on commit 58849a2

Please sign in to comment.