
Revert "Sourcemap and Breakpoint Fixes (#3121)"

This reverts commit 964f229f98.
Tim Neutkens committed 2017-10-20 08:02:04 +02:00
parent 81479eb601
commit 1cc3dbe35b
5 changed files with 81 additions and 90 deletions

View file

@@ -1,5 +1,3 @@
-import { ConcatSource } from 'webpack-sources'
-
 // This plugin combines a set of assets into a single asset
 // This should be only used with text assets,
 // otherwise the result is unpredictable.
@@ -10,23 +8,23 @@ export default class CombineAssetsPlugin {
   }

   apply (compiler) {
-    compiler.plugin('compilation', (compilation) => {
-      compilation.plugin('optimize-chunk-assets', (chunks, callback) => {
-        const concat = new ConcatSource()
+    compiler.plugin('after-compile', (compilation, callback) => {
+      let newSource = ''
       this.input.forEach((name) => {
         const asset = compilation.assets[name]
         if (!asset) return

-        concat.add(asset)
+        newSource += `${asset.source()}\n`
         // We keep existing assets since that helps when analyzing the bundle
       })

-      compilation.assets[this.output] = concat
-      callback()
-    })
+      compilation.assets[this.output] = {
+        source: () => newSource,
+        size: () => newSource.length
+      }
+
+      callback()
     })
   }
 }
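
Note: a minimal usage sketch of the restored CombineAssetsPlugin in a webpack (v2/v3 style) config. The require path and the input/output asset names are assumptions for illustration, not taken from this diff.

// webpack.config.js (sketch)
const CombineAssetsPlugin = require('./plugins/combine-assets-plugin').default

module.exports = {
  // ...entry, output, loaders...
  plugins: [
    new CombineAssetsPlugin({
      input: ['manifest.js', 'commons.js', 'main.js'], // assumed asset names
      output: 'app.js' // assumed name of the combined asset
    })
  ]
}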

View file

@@ -1,37 +1,39 @@
-import { ConcatSource } from 'webpack-sources'
-
-export default class DynamicChunksPlugin {
+export default class PagesPlugin {
   apply (compiler) {
     const isImportChunk = /^chunks[/\\].*\.js$/
     const matchChunkName = /^chunks[/\\](.*)$/

-    compiler.plugin('compilation', (compilation) => {
-      compilation.plugin('optimize-chunk-assets', (chunks, callback) => {
-        chunks = chunks.filter(chunk => isImportChunk.test(chunk.name))
+    compiler.plugin('after-compile', (compilation, callback) => {
+      const chunks = Object
+        .keys(compilation.namedChunks)
+        .map(key => compilation.namedChunks[key])
+        .filter(chunk => isImportChunk.test(chunk.name))

       chunks.forEach((chunk) => {
         const asset = compilation.assets[chunk.name]
         if (!asset) return

         const chunkName = matchChunkName.exec(chunk.name)[1]

-        const concat = new ConcatSource()
-
-        concat.add(`__NEXT_REGISTER_CHUNK('${chunkName}', function() {
-        `)
-        concat.add(asset)
-        concat.add(`
-        })
-        `)
+        const content = asset.source()
+        const newContent = `
+          window.__NEXT_REGISTER_CHUNK('${chunkName}', function() {
+            ${content}
+          })
+        `

         // Replace the exisiting chunk with the new content
-        compilation.assets[chunk.name] = concat
+        compilation.assets[chunk.name] = {
+          source: () => newContent,
+          size: () => newContent.length
+        }

         // This is to support, webpack dynamic import support with HMR
-        compilation.assets[`chunks/${chunk.name}`] = concat
+        compilation.assets[`chunks/${chunk.id}`] = {
+          source: () => newContent,
+          size: () => newContent.length
+        }
       })

       callback()
-      })
     })
   }
 }
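
Note: the rewritten chunk only works if the client defines window.__NEXT_REGISTER_CHUNK before the chunk script executes. A minimal sketch of such a registry, offered as an assumption about the client side rather than code from this commit:

// Hypothetical client-side shim: remember each chunk's factory by name so the
// dynamic-import runtime can evaluate it when the chunk is requested.
const registeredChunks = {}
window.__NEXT_REGISTER_CHUNK = function (chunkName, fn) {
  registeredChunks[chunkName] = fn
}
// registeredChunks[chunkName]() would then evaluate that chunk's source on demand.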

View file

@@ -1,4 +1,3 @@
-import { ConcatSource } from 'webpack-sources'
 import {
   IS_BUNDLED_PAGE,
   MATCH_ROUTE_NAME
@@ -6,48 +5,43 @@ import {

 export default class PagesPlugin {
   apply (compiler) {
-    compiler.plugin('compilation', (compilation) => {
-      compilation.plugin('optimize-chunk-assets', (chunks, callback) => {
-        const pages = chunks.filter(chunk => IS_BUNDLED_PAGE.test(chunk.name))
+    compiler.plugin('after-compile', (compilation, callback) => {
+      const pages = Object
+        .keys(compilation.namedChunks)
+        .map(key => compilation.namedChunks[key])
+        .filter(chunk => IS_BUNDLED_PAGE.test(chunk.name))

       pages.forEach((chunk) => {
+        const page = compilation.assets[chunk.name]
         const pageName = MATCH_ROUTE_NAME.exec(chunk.name)[1]
         let routeName = pageName

         // We need to convert \ into / when we are in windows
         // to get the proper route name
         // Here we need to do windows check because it's possible
         // to have "\" in the filename in unix.
         // Anyway if someone did that, he'll be having issues here.
         // But that's something we cannot avoid.
         if (/^win/.test(process.platform)) {
           routeName = routeName.replace(/\\/g, '/')
         }

         routeName = `/${routeName.replace(/(^|\/)index$/, '')}`

-        // Replace the exisiting chunk with the new content
-        const asset = compilation.assets[chunk.name]
-        if (!asset) return
-
-        const concat = new ConcatSource()
-
-        concat.add(`
-          __NEXT_REGISTER_PAGE('${routeName}', function() {
-            var comp =
-        `)
-        concat.add(asset)
-        concat.add(`
-            return { page: comp.default }
-          })
-        `)
+        const content = page.source()
+        const newContent = `
+          window.__NEXT_REGISTER_PAGE('${routeName}', function() {
+            var comp = ${content}
+            return { page: comp.default }
+          })
+        `

         // Replace the exisiting chunk with the new content
-        compilation.assets[chunk.name] = concat
+        compilation.assets[chunk.name] = {
+          source: () => newContent,
+          size: () => newContent.length
+        }
       })

       callback()
-      })
     })
   }
 }
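
Note: the route-name handling restored above is easier to follow in isolation. A standalone sketch of the same normalization; the regexes and the platform check are copied from the plugin, while the function name is only for illustration:

function toRouteName (pageName) {
  let routeName = pageName

  // Backslashes only need normalizing on Windows.
  if (/^win/.test(process.platform)) {
    routeName = routeName.replace(/\\/g, '/')
  }

  // Strip a trailing "index" segment so an index page maps to "/".
  return `/${routeName.replace(/(^|\/)index$/, '')}`
}

// toRouteName('index')      -> '/'
// toRouteName('about')      -> '/about'
// toRouteName('docs/index') -> '/docs'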

View file

@@ -408,11 +408,7 @@ export default class Server {
   }

   handleBuildId (buildId, res) {
-    if (this.dev) {
-      res.setHeader('Cache-Control', 'no-store, must-revalidate')
-      return true
-    }
-
+    if (this.dev) return true
     if (buildId !== this.renderOpts.buildId) {
       return false
     }
@@ -432,17 +428,13 @@ export default class Server {
   }

   handleBuildHash (filename, hash, res) {
-    if (this.dev) {
-      res.setHeader('Cache-Control', 'no-store, must-revalidate')
-      return true
-    }
+    if (this.dev) return

     if (hash !== this.buildStats[filename].hash) {
       throw new Error(`Invalid Build File Hash(${hash}) for chunk: ${filename}`)
     }

     res.setHeader('Cache-Control', 'max-age=365000000, immutable')
-    return true
   }

   send404 (res) {
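
Note: a hedged sketch of how a caller might use the restored helper when serving a hashed build file. The serveBuildFile method and its arguments are hypothetical; only handleBuildHash and send404 appear in this diff.

// Hypothetical caller: in dev handleBuildHash is a no-op, in production it
// throws on a stale hash and otherwise marks the response as immutable.
async serveBuildFile (req, res, filename, hash) {
  try {
    this.handleBuildHash(filename, hash, res)
  } catch (err) {
    return this.send404(res)
  }
  // ...stream the requested file from the build directory...
}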

View file

@@ -93,6 +93,11 @@ async function doRender (req, res, pathname, query, {
   }

   const docProps = await loadGetInitialProps(Document, { ...ctx, renderPage })
+  // While developing, we should not cache any assets.
+  // So, we use a different buildId for each page load.
+  // With that we can ensure, we have unique URL for assets per every page load.
+  // So, it'll prevent issues like this: https://git.io/vHLtb
+  const devBuildId = Date.now()

   if (res.finished) return
@@ -102,7 +107,7 @@ async function doRender (req, res, pathname, query, {
     props,
     pathname,
     query,
-    buildId,
+    buildId: dev ? devBuildId : buildId,
     buildStats,
     assetPrefix,
     nextExport,
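
Note: the practical effect of the restored devBuildId is that asset URLs change on every render in development, so the browser cannot reuse a cached copy, while production keeps a stable, cacheable buildId. An illustrative sketch; the URL shape and variable names are assumptions:

// dev: buildId is Date.now(), different on every page load -> always a cache miss.
// prod: buildId is the stable id from the build -> long-lived caching works.
const effectiveBuildId = dev ? devBuildId : buildId
const scriptSrc = `${assetPrefix}/_next/${effectiveBuildId}/main.js` // assumed URL shape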