
Combine source maps (#3178)

* Propagate source maps through combine assets step

* Use constant development build id

* Move combine assets step before uglify step

This ensures that uglify will catch these changes.

* Move dynamic chunks step before uglify step

This ensures that uglify will catch these changes.

* Use chunk templates for page and dynamic chunks

This is a little more in line with how webpack generates its bootstrap and should have better compatibility with other plugins and source map generation. (A minimal sketch of the chunk-template pattern follows the commit metadata below.)

* Register combined source map with chunks

This ensures that a source map is fully generated.

* Do not minimize combined map inputs
Kevin Decker, 2017-10-30 09:57:35 -05:00 (committed by Tim Neutkens)
parent 4c18678d54
commit 53a2c5a7fc
6 changed files with 106 additions and 82 deletions
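
To make the chunk-template point above concrete, here is a minimal sketch of the pattern the diffs below use (webpack 2/3 plugin API). The plugin names, the chunk-name test, and the `__MY_REGISTER` wrapper are illustrative only, not Next.js code:

import { ConcatSource } from 'webpack-sources'

// Wraps the rendered source of matching chunks without losing their source maps.
class WrapChunkTemplatePlugin {
  apply (chunkTemplate) {
    chunkTemplate.plugin('render', function (modules, chunk) {
      if (!/^chunks[/\\]/.test(chunk.name)) {
        return modules // leave other chunks untouched
      }

      // ConcatSource keeps the source map of `modules` intact, unlike
      // rebuilding the chunk from asset.source() strings.
      const source = new ConcatSource()
      source.add(`window.__MY_REGISTER('${chunk.name}', function () {\n`)
      source.add(modules)
      source.add('\n})\n')
      return source
    })
  }
}

export default class WrapChunksPlugin {
  apply (compiler) {
    compiler.plugin('compilation', (compilation) => {
      compilation.chunkTemplate.apply(new WrapChunkTemplatePlugin())
    })
  }
}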


@@ -1,3 +1,5 @@
+import { ConcatSource } from 'webpack-sources'
+
 // This plugin combines a set of assets into a single asset
 // This should be only used with text assets,
 // otherwise the result is unpredictable.
@@ -8,23 +10,28 @@ export default class CombineAssetsPlugin {
   }

   apply (compiler) {
-    compiler.plugin('after-compile', (compilation, callback) => {
-      let newSource = ''
-      this.input.forEach((name) => {
-        const asset = compilation.assets[name]
-        if (!asset) return
+    compiler.plugin('compilation', (compilation) => {
+      compilation.plugin('additional-chunk-assets', (chunks) => {
+        const concat = new ConcatSource()

-        newSource += `${asset.source()}\n`
+        this.input.forEach((name) => {
+          const asset = compilation.assets[name]
+          if (!asset) return

-        // We keep existing assets since that helps when analyzing the bundle
-      })
+          concat.add(asset)
+          // We keep existing assets since that helps when analyzing the bundle
+        })

-      compilation.assets[this.output] = {
-        source: () => newSource,
-        size: () => newSource.length
-      }
+        compilation.additionalChunkAssets.push(this.output)
+        compilation.assets[this.output] = concat

-      callback()
+        // Register the combined file as an output of the associated chunks
+        chunks.filter((chunk) => {
+          return chunk.files.reduce((prev, file) => prev || this.input.includes(file), false)
+        })
+          .forEach((chunk) => chunk.files.push(this.output))
+      })
     })
   }
 }
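
The key to the "propagate source maps" bullet is that ConcatSource combines Source objects rather than strings. A standalone sketch of plain webpack-sources usage (not Next.js code; the file names here are made up) shows that the combined asset keeps a map referencing its inputs, which the old asset.source() string concatenation threw away:

import { ConcatSource, OriginalSource } from 'webpack-sources'

const concat = new ConcatSource()
concat.add(new OriginalSource('console.log("a")\n', 'a.js'))
concat.add(new OriginalSource('console.log("b")\n', 'b.js'))

// sourceAndMap() yields the concatenated code plus a combined source map;
// map.sources lists both inputs, so a later minify step that runs with
// sourceMap enabled can chain onto it.
const { source, map } = concat.sourceAndMap()
console.log(map.sources) // -> [ 'a.js', 'b.js' ]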


@@ -1,39 +1,48 @@
-export default class PagesPlugin {
-  apply (compiler) {
-    const isImportChunk = /^chunks[/\\].*\.js$/
-    const matchChunkName = /^chunks[/\\](.*)$/
+import { ConcatSource } from 'webpack-sources'

-    compiler.plugin('after-compile', (compilation, callback) => {
-      const chunks = Object
-        .keys(compilation.namedChunks)
-        .map(key => compilation.namedChunks[key])
-        .filter(chunk => isImportChunk.test(chunk.name))
+const isImportChunk = /^chunks[/\\].*\.js$/
+const matchChunkName = /^chunks[/\\](.*)$/

-      chunks.forEach((chunk) => {
-        const asset = compilation.assets[chunk.name]
-        if (!asset) return
+class DynamicChunkTemplatePlugin {
+  apply (chunkTemplate) {
+    chunkTemplate.plugin('render', function (modules, chunk) {
+      if (!isImportChunk.test(chunk.name)) {
+        return modules
+      }

-        const chunkName = matchChunkName.exec(chunk.name)[1]
+      const chunkName = matchChunkName.exec(chunk.name)[1]
+      const source = new ConcatSource()

-        const content = asset.source()
-        const newContent = `
-          window.__NEXT_REGISTER_CHUNK('${chunkName}', function() {
-            ${content}
-          })
-        `
-        // Replace the exisiting chunk with the new content
-        compilation.assets[chunk.name] = {
-          source: () => newContent,
-          size: () => newContent.length
-        }
+      source.add(`
+        __NEXT_REGISTER_CHUNK('${chunkName}', function() {
+      `)
+      source.add(modules)
+      source.add(`
+        })
+      `)

-        // This is to support, webpack dynamic import support with HMR
-        compilation.assets[`chunks/${chunk.id}`] = {
-          source: () => newContent,
-          size: () => newContent.length
-        }
-      })
-
-      callback()
+      return source
     })
   }
 }
+
+export default class DynamicChunksPlugin {
+  apply (compiler) {
+    compiler.plugin('compilation', (compilation) => {
+      compilation.chunkTemplate.apply(new DynamicChunkTemplatePlugin())
+
+      compilation.plugin('additional-chunk-assets', (chunks) => {
+        chunks = chunks.filter(chunk =>
+          isImportChunk.test(chunk.name) && compilation.assets[chunk.name]
+        )
+
+        chunks.forEach((chunk) => {
+          // This is to support, webpack dynamic import support with HMR
+          const copyFilename = `chunks/${chunk.name}`
+          compilation.additionalChunkAssets.push(copyFilename)
+          compilation.assets[copyFilename] = compilation.assets[chunk.name]
+        })
+      })
+    })
+  }
+}
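
The `__NEXT_REGISTER_CHUNK(name, fn)` wrapper implies a matching registry on the client. A purely hypothetical sketch of the shape of that contract (the actual next.js client code differs and also handles the HMR copy emitted above):

// Hypothetical consumer of the wrapped dynamic chunks; illustrative only.
const registeredChunks = new Map()

window.__NEXT_REGISTER_CHUNK = function (chunkName, fn) {
  // fn() evaluates the chunk's webpack modules once the script has loaded.
  registeredChunks.set(chunkName, fn())
}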


@@ -1,20 +1,17 @@
+import { ConcatSource } from 'webpack-sources'
 import {
   IS_BUNDLED_PAGE,
   MATCH_ROUTE_NAME
 } from '../../utils'

-export default class PagesPlugin {
-  apply (compiler) {
-    compiler.plugin('after-compile', (compilation, callback) => {
-      const pages = Object
-        .keys(compilation.namedChunks)
-        .map(key => compilation.namedChunks[key])
-        .filter(chunk => IS_BUNDLED_PAGE.test(chunk.name))
+class PageChunkTemplatePlugin {
+  apply (chunkTemplate) {
+    chunkTemplate.plugin('render', function (modules, chunk) {
+      if (!IS_BUNDLED_PAGE.test(chunk.name)) {
+        return modules
+      }

-      pages.forEach((chunk) => {
-        const page = compilation.assets[chunk.name]
-        const pageName = MATCH_ROUTE_NAME.exec(chunk.name)[1]
-        let routeName = pageName
+      let routeName = MATCH_ROUTE_NAME.exec(chunk.name)[1]

       // We need to convert \ into / when we are in windows
       // to get the proper route name
@@ -22,26 +19,33 @@ export default class PagesPlugin {
       // to have "\" in the filename in unix.
       // Anyway if someone did that, he'll be having issues here.
       // But that's something we cannot avoid.
-        if (/^win/.test(process.platform)) {
-          routeName = routeName.replace(/\\/g, '/')
-        }
+      if (/^win/.test(process.platform)) {
+        routeName = routeName.replace(/\\/g, '/')
+      }

-        routeName = `/${routeName.replace(/(^|\/)index$/, '')}`
+      routeName = `/${routeName.replace(/(^|\/)index$/, '')}`

-        const content = page.source()
-        const newContent = `
-          window.__NEXT_REGISTER_PAGE('${routeName}', function() {
-            var comp = ${content}
-            return { page: comp.default }
-          })
-        `
-        // Replace the exisiting chunk with the new content
-        compilation.assets[chunk.name] = {
-          source: () => newContent,
-          size: () => newContent.length
-        }
-      })
-
-      callback()
+      const source = new ConcatSource()
+
+      source.add(`
+        __NEXT_REGISTER_PAGE('${routeName}', function() {
+          var comp =
+      `)
+      source.add(modules)
+      source.add(`
+          return { page: comp.default }
+        })
+      `)
+
+      return source
     })
   }
 }
+
+export default class PagesPlugin {
+  apply (compiler) {
+    compiler.plugin('compilation', (compilation) => {
+      compilation.chunkTemplate.apply(new PageChunkTemplatePlugin())
+    })
+  }
+}


@@ -149,6 +149,7 @@ export default async function createCompiler (dir, { buildId, dev = false, quiet
       output: 'app.js'
     }),
     new webpack.optimize.UglifyJsPlugin({
+      exclude: ['manifest.js', 'commons.js', 'main.js'],
      compress: { warnings: false },
      sourceMap: false
    })
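
This exclusion ties the commit points together: the combined and dynamic-chunk assets are now emitted during `additional-chunk-assets`, ahead of minification, so an ordering roughly like the following lets uglify minify the combined `app.js` once instead of re-minifying its inputs. This is a sketch, not the full next.js config; the import paths and the `input` list (mirroring the `exclude` list above) are illustrative:

import webpack from 'webpack'
import CombineAssetsPlugin from './plugins/combine-assets-plugin' // illustrative path
import PagesPlugin from './plugins/pages-plugin' // illustrative path
import DynamicChunksPlugin from './plugins/dynamic-chunks-plugin' // illustrative path

const plugins = [
  new PagesPlugin(),
  new DynamicChunksPlugin(),
  new CombineAssetsPlugin({
    input: ['manifest.js', 'commons.js', 'main.js'], // illustrative input list
    output: 'app.js'
  }),
  new webpack.optimize.UglifyJsPlugin({
    exclude: ['manifest.js', 'commons.js', 'main.js'], // do not minimize the combined map inputs
    compress: { warnings: false },
    sourceMap: false
  })
]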


@@ -408,7 +408,11 @@ export default class Server {
   }

   handleBuildId (buildId, res) {
-    if (this.dev) return true
+    if (this.dev) {
+      res.setHeader('Cache-Control', 'no-store, must-revalidate')
+      return true
+    }
+
     if (buildId !== this.renderOpts.buildId) {
       return false
     }
@@ -428,13 +432,17 @@
   }

   handleBuildHash (filename, hash, res) {
-    if (this.dev) return
+    if (this.dev) {
+      res.setHeader('Cache-Control', 'no-store, must-revalidate')
+      return true
+    }

     if (hash !== this.buildStats[filename].hash) {
       throw new Error(`Invalid Build File Hash(${hash}) for chunk: ${filename}`)
     }

     res.setHeader('Cache-Control', 'max-age=365000000, immutable')
+    return true
   }

   send404 (res) {


@@ -96,11 +96,6 @@ async function doRender (req, res, pathname, query, {
   }

   const docProps = await loadGetInitialProps(Document, { ...ctx, renderPage })

-  // While developing, we should not cache any assets.
-  // So, we use a different buildId for each page load.
-  // With that we can ensure, we have unique URL for assets per every page load.
-  // So, it'll prevent issues like this: https://git.io/vHLtb
-  const devBuildId = Date.now()

   if (res.finished) return
@@ -110,7 +105,7 @@
    props,
    pathname,
    query,
-    buildId: dev ? devBuildId : buildId,
+    buildId,
    buildStats,
    assetPrefix,
    nextExport,