mirror of
https://github.com/terribleplan/next.js.git
synced 2024-01-19 02:48:18 +00:00
165924b71b
* Serve JSON pages directly from the filesystem. * Make Json pages even if there's an error. * Implement much better page serving. * Use JsonPagesPlugin in the production mode as well. * Add gzip support for JSON pages. * Use glob-promise instead of recursive-readdir * Handle renderStatic 404 properly. * Simplify the gzip code. * Cache already read JSON pages. * Change JSON pages extension to .json. * Fix HMR related issue. * Fix hot-reload for .json solely on server. * Properly clear cache on hot-reloader. * Convert .js pages into .json page right inside the plugin. * Fix gzipping .json pages. * Remove unwanted json pages cleanup. * Get rid of deprecated fs.exists for fs.access
39 lines
990 B
JavaScript
39 lines
990 B
JavaScript
import fs from 'fs'
|
|
import path from 'path'
|
|
import zlib from 'zlib'
|
|
import glob from 'glob-promise'
|
|
|
|
// Gzip every build asset under `<dir>/.next`: the shared core bundles
// (commons.js, main.js) plus every serialized JSON page bundle.
// Compression is batched so that at most 10 files are in flight at once.
export default async function gzipAssets (dir) {
  const nextDir = path.resolve(dir, '.next')

  const coreAssets = [
    path.join(nextDir, 'commons.js'),
    path.join(nextDir, 'main.js')
  ]
  const pages = await glob('bundles/pages/**/*.json', { cwd: nextDir })
  const pageAssets = pages.map((page) => path.join(nextDir, page))

  const allAssets = [...coreAssets, ...pageAssets]

  // gzip only 10 assets in parallel at a time.
  const BATCH_SIZE = 10
  for (let start = 0; start < allAssets.length; start += BATCH_SIZE) {
    const batch = allAssets.slice(start, start + BATCH_SIZE)
    await Promise.all(batch.map(gzip))
  }
}
|
|
|
|
// Gzip a single file, writing the result next to it as `<filePath>.gz`.
// Resolves when the compressed file has been fully flushed to disk;
// rejects on any read, compression, or write failure.
export function gzip (filePath) {
  return new Promise((resolve, reject) => {
    const input = fs.createReadStream(filePath)
    const gzipper = zlib.createGzip()
    const output = fs.createWriteStream(`${filePath}.gz`)

    // `pipe()` does NOT forward errors downstream, so listening only on
    // the output stream (as before) left read/compression errors unhandled
    // and the Promise permanently pending. Listen on every stream.
    input.on('error', reject)
    gzipper.on('error', reject)
    output.on('error', reject)
    output.on('finish', resolve)

    input.pipe(gzipper).pipe(output)
  })
}
|