add cache compression

Tobias Koppers 2021-06-30 00:05:38 +02:00
parent 29eff8a74e
commit 492055ae90
13 changed files with 174 additions and 44 deletions


@@ -19,6 +19,7 @@
"preloading",
"preloaded",
"gzipping",
"brotli",
"submodule",
"submodules",
"typeof",


@@ -914,6 +914,10 @@ export interface FileCacheOptions {
* Locations for the cache (defaults to cacheDirectory / name).
*/
cacheLocation?: string;
/**
* Compression type used for the cache files.
*/
compression?: false | "gzip" | "brotli";
/**
* Algorithm used for generating the hash (see Node.js crypto package).
*/
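
The new field completes the filesystem cache options. A minimal sketch of opting in from a user config (assuming a webpack build that includes this commit):

// webpack.config.js — sketch; the only accepted values are false, "gzip" and "brotli"
module.exports = {
  cache: {
    type: "filesystem",
    compression: "gzip"
  }
};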


@@ -572,7 +572,8 @@ class WebpackOptionsApply extends OptionsApply {
snapshot: options.snapshot,
maxAge: cacheOptions.maxAge,
profile: cacheOptions.profile,
allowCollectingMemory: cacheOptions.allowCollectingMemory
allowCollectingMemory: cacheOptions.allowCollectingMemory,
compression: cacheOptions.compression
}),
cacheOptions.idleTimeout,
cacheOptions.idleTimeoutForInitialStore,


@@ -848,6 +848,7 @@ class PackFileCacheStrategy {
* @param {number} options.maxAge max age of cache items
* @param {boolean} options.profile track and log detailed timing information for individual cache items
* @param {boolean} options.allowCollectingMemory allow collecting unused memory created during deserialization
* @param {false | "gzip" | "brotli"} options.compression compression used for the pack file
*/
constructor({
compiler,
@@ -859,7 +860,8 @@
snapshot,
maxAge,
profile,
allowCollectingMemory
allowCollectingMemory,
compression
}) {
this.fileSerializer = createFileSerializer(fs);
this.fileSystemInfo = new FileSystemInfo(fs, {
@@ -875,6 +877,13 @@
this.maxAge = maxAge;
this.profile = profile;
this.allowCollectingMemory = allowCollectingMemory;
this.compression = compression;
this._extension =
compression === "brotli"
? ".pack.br"
: compression === "gzip"
? ".pack.gz"
: ".pack";
this.snapshot = snapshot;
/** @type {Set<string>} */
this.buildDependencies = new Set();
@@ -916,8 +925,8 @@
logger.time("restore cache container");
return this.fileSerializer
.deserialize(null, {
filename: `${cacheLocation}/index.pack`,
extension: ".pack",
filename: `${cacheLocation}/index${this._extension}`,
extension: this._extension,
logger,
profile,
retainedBuffer: this.allowCollectingMemory
@@ -927,11 +936,13 @@
.catch(err => {
if (err.code !== "ENOENT") {
logger.warn(
`Restoring pack failed from ${cacheLocation}.pack: ${err}`
`Restoring pack failed from ${cacheLocation}${this._extension}: ${err}`
);
logger.debug(err.stack);
} else {
logger.debug(`No pack exists at ${cacheLocation}.pack: ${err}`);
logger.debug(
`No pack exists at ${cacheLocation}${this._extension}: ${err}`
);
}
return undefined;
})
@@ -940,14 +951,14 @@
if (!packContainer) return undefined;
if (!(packContainer instanceof PackContainer)) {
logger.warn(
`Restored pack from ${cacheLocation}.pack, but contained content is unexpected.`,
`Restored pack from ${cacheLocation}${this._extension}, but contained content is unexpected.`,
packContainer
);
return undefined;
}
if (packContainer.version !== version) {
logger.log(
`Restored pack from ${cacheLocation}.pack, but version doesn't match.`
`Restored pack from ${cacheLocation}${this._extension}, but version doesn't match.`
);
return undefined;
}
@@ -959,14 +970,14 @@
(err, valid) => {
if (err) {
logger.log(
`Restored pack from ${cacheLocation}.pack, but checking snapshot of build dependencies errored: ${err}.`
`Restored pack from ${cacheLocation}${this._extension}, but checking snapshot of build dependencies errored: ${err}.`
);
logger.debug(err.stack);
return resolve(false);
}
if (!valid) {
logger.log(
`Restored pack from ${cacheLocation}.pack, but build dependencies have changed.`
`Restored pack from ${cacheLocation}${this._extension}, but build dependencies have changed.`
);
return resolve(false);
}
@@ -981,7 +992,7 @@
(err, valid) => {
if (err) {
logger.log(
`Restored pack from ${cacheLocation}.pack, but checking snapshot of resolving of build dependencies errored: ${err}.`
`Restored pack from ${cacheLocation}${this._extension}, but checking snapshot of resolving of build dependencies errored: ${err}.`
);
logger.debug(err.stack);
return resolve(false);
@@ -1001,7 +1012,7 @@
(err, valid) => {
if (err) {
logger.log(
`Restored pack from ${cacheLocation}.pack, but resolving of build dependencies errored: ${err}.`
`Restored pack from ${cacheLocation}${this._extension}, but resolving of build dependencies errored: ${err}.`
);
logger.debug(err.stack);
return resolve(false);
@@ -1012,7 +1023,7 @@
return resolve(true);
}
logger.log(
`Restored pack from ${cacheLocation}.pack, but build dependencies resolve to different locations.`
`Restored pack from ${cacheLocation}${this._extension}, but build dependencies resolve to different locations.`
);
return resolve(false);
}
@@ -1052,7 +1063,7 @@
})
.catch(err => {
this.logger.warn(
`Restoring pack from ${cacheLocation}.pack failed: ${err}`
`Restoring pack from ${cacheLocation}${this._extension} failed: ${err}`
);
this.logger.debug(err.stack);
return new Pack(logger, this.maxAge);
@@ -1236,8 +1247,8 @@
);
return this.fileSerializer
.serialize(content, {
filename: `${this.cacheLocation}/index.pack`,
extension: ".pack",
filename: `${this.cacheLocation}/index${this._extension}`,
extension: this._extension,
logger: this.logger,
profile: this.profile
})
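
The compression setting also picks the pack file extension, so caches written with different compression settings never shadow each other on disk, and every restore/store log message interpolates the same extension. Illustrative mapping (<cacheLocation> stands for the resolved cache location):

// compression: false    -> <cacheLocation>/index.pack
// compression: "gzip"   -> <cacheLocation>/index.pack.gz
// compression: "brotli" -> <cacheLocation>/index.pack.br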


@@ -298,6 +298,7 @@ const applyCacheDefaults = (cache, { name, mode, development }) => {
);
D(cache, "hashAlgorithm", "md4");
D(cache, "store", "pack");
D(cache, "compression", development ? false : "gzip");
D(cache, "profile", false);
D(cache, "idleTimeout", 60000);
D(cache, "idleTimeoutForInitialStore", 5000);
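
The default trades CPU for disk: gzip everywhere except development mode, where serialization speed matters more than cache size. A sketch of overriding it, e.g. to keep a production cache uncompressed:

// webpack.config.js — sketch: opt out of the production "gzip" default
module.exports = {
  mode: "production",
  cache: {
    type: "filesystem",
    compression: false
  }
};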


@@ -137,6 +137,7 @@ const getNormalizedWebpackOptions = config => {
cacheDirectory: cache.cacheDirectory,
cacheLocation: cache.cacheLocation,
hashAlgorithm: cache.hashAlgorithm,
compression: cache.compression,
idleTimeout: cache.idleTimeout,
idleTimeoutForInitialStore: cache.idleTimeoutForInitialStore,
idleTimeoutAfterLargeChanges: cache.idleTimeoutAfterLargeChanges,


@@ -548,12 +548,7 @@ class BinaryMiddleware extends SerializerMiddleware {
const isInCurrentBuffer = n => {
return currentIsBuffer && n + currentPosition <= currentBuffer.length;
};
/**
* Reads n bytes
* @param {number} n amount of bytes to read
* @returns {Buffer} buffer with bytes
*/
const read = n => {
const ensureBuffer = () => {
if (!currentIsBuffer) {
throw new Error(
currentBuffer === null
@@ -561,9 +556,31 @@
: "Unexpected lazy element in stream"
);
}
};
/**
* Reads n bytes
* @param {number} n amount of bytes to read
* @returns {Buffer} buffer with bytes
*/
const read = n => {
ensureBuffer();
const rem = currentBuffer.length - currentPosition;
if (rem < n) {
return Buffer.concat([read(rem), read(n - rem)]);
const buffers = [read(rem)];
n -= rem;
ensureBuffer();
while (currentBuffer.length < n) {
const b = /** @type {Buffer} */ (currentBuffer);
buffers.push(b);
n -= b.length;
currentDataItem++;
currentBuffer =
currentDataItem < data.length ? data[currentDataItem] : null;
currentIsBuffer = Buffer.isBuffer(currentBuffer);
ensureBuffer();
}
buffers.push(read(n));
return Buffer.concat(buffers);
}
const b = /** @type {Buffer} */ (currentBuffer);
const res = Buffer.from(b.buffer, b.byteOffset + currentPosition, n);
@@ -577,13 +594,7 @@
* @returns {Buffer} buffer with bytes
*/
const readUpTo = n => {
if (!currentIsBuffer) {
throw new Error(
currentBuffer === null
? "Unexpected end of stream"
: "Unexpected lazy element in stream"
);
}
ensureBuffer();
const rem = currentBuffer.length - currentPosition;
if (rem < n) {
n = rem;
@@ -595,13 +606,7 @@
return res;
};
const readU8 = () => {
if (!currentIsBuffer) {
throw new Error(
currentBuffer === null
? "Unexpected end of stream"
: "Unexpected lazy element in stream"
);
}
ensureBuffer();
/**
* There is no need to check remaining buffer size here
* since {@link checkOverflow} guarantees at least one byte remaining
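
With decompression in the pipeline the deserializer receives a list of chunk-sized buffers rather than one contiguous buffer, so a single logical read can straddle several of them — hence the hoisted ensureBuffer and the loop above. A simplified standalone sketch of such a spanning read (not webpack's internal API):

// Sketch: read n bytes from a queue of Buffers, stitching across chunk boundaries
function makeReader(buffers) {
  let index = 0;
  let pos = 0;
  return function read(n) {
    const parts = [];
    while (n > 0) {
      const current = buffers[index];
      if (current === undefined) throw new Error("Unexpected end of stream");
      const take = Math.min(n, current.length - pos);
      parts.push(current.subarray(pos, pos + take));
      pos += take;
      n -= take;
      if (pos === current.length) {
        index++;
        pos = 0;
      }
    }
    return parts.length === 1 ? parts[0] : Buffer.concat(parts);
  };
}
// usage: const read = makeReader(chunks); const header = read(8);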


@@ -5,6 +5,14 @@
"use strict";
const { constants } = require("buffer");
const { pipeline } = require("stream");
const {
createBrotliCompress,
createBrotliDecompress,
createGzip,
createGunzip,
constants: zConstants
} = require("zlib");
const createHash = require("../util/createHash");
const { dirname, join, mkdirp } = require("../util/fs");
const memoize = require("../util/memoize");
@@ -37,6 +45,9 @@ const hashForName = buffers => {
return /** @type {string} */ (hash.digest("hex"));
};
const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
const DECOMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
const writeUInt64LE = Buffer.prototype.writeBigUInt64LE
? (buf, value, offset) => {
buf.writeBigUInt64LE(BigInt(value), offset);
@@ -69,7 +80,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
* @param {FileMiddleware} middleware this
* @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
* @param {string | boolean} name file base name
* @param {function(string | false, Buffer[]): Promise} writeFile writes a file
* @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
* @returns {Promise<SerializeResult>} resulting file pointer and promise
*/
const serialize = async (middleware, data, name, writeFile) => {
@@ -399,11 +410,37 @@ class FileMiddleware extends SerializerMiddleware {
? join(this.fs, filename, `../${name}${extension}`)
: filename;
await new Promise((resolve, reject) => {
const stream = this.fs.createWriteStream(file + "_");
let stream = this.fs.createWriteStream(file + "_");
let compression;
if (file.endsWith(".gz")) {
compression = createGzip({
chunkSize: COMPRESSION_CHUNK_SIZE,
level: zConstants.Z_BEST_SPEED
});
} else if (file.endsWith(".br")) {
compression = createBrotliCompress({
chunkSize: COMPRESSION_CHUNK_SIZE,
params: {
[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
[zConstants.BROTLI_PARAM_QUALITY]: 2,
[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
[zConstants.BROTLI_PARAM_SIZE_HINT]: content.reduce(
(size, b) => size + b.length,
0
)
}
});
}
if (compression) {
pipeline(compression, stream, reject);
stream = compression;
stream.on("finish", () => resolve());
} else {
stream.on("error", err => reject(err));
stream.on("finish", () => resolve());
}
for (const b of content) stream.write(b);
stream.end();
stream.on("error", err => reject(err));
stream.on("finish", () => resolve());
});
if (name) allWrittenFiles.add(file);
};
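
Both codecs are tuned for speed over ratio — gzip at Z_BEST_SPEED, brotli at quality 2 with text mode and a size hint — because cache writes sit on the idle-save path and should finish quickly. A standalone sketch of the same write-side plumbing (helper name illustrative, gzip-only for brevity):

// Sketch: compress a buffer list to a file the way the cache write path does
const fs = require("fs");
const { pipeline } = require("stream");
const { createGzip, constants } = require("zlib");

function writeCompressed(file, buffers, callback) {
  const gzip = createGzip({ level: constants.Z_BEST_SPEED });
  // node-style callback fires once the file is fully written, or on error
  pipeline(gzip, fs.createWriteStream(file), callback);
  for (const b of buffers) gzip.write(b);
  gzip.end();
}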
@@ -470,6 +507,34 @@
let currentBuffer;
let currentBufferUsed;
const buf = [];
let decompression;
if (file.endsWith(".gz")) {
decompression = createGunzip({
chunkSize: DECOMPRESSION_CHUNK_SIZE
});
} else if (file.endsWith(".br")) {
decompression = createBrotliDecompress({
chunkSize: DECOMPRESSION_CHUNK_SIZE
});
}
if (decompression) {
let newResolve, newReject;
resolve(
Promise.all([
new Promise((rs, rj) => {
newResolve = rs;
newReject = rj;
}),
new Promise((resolve, reject) => {
decompression.on("data", chunk => buf.push(chunk));
decompression.on("end", () => resolve());
decompression.on("error", err => reject(err));
})
]).then(() => buf)
);
resolve = newResolve;
reject = newReject;
}
this.fs.open(file, "r", (err, fd) => {
if (err) {
reject(err);
@@ -478,7 +543,11 @@
const read = () => {
if (currentBuffer === undefined) {
currentBuffer = Buffer.allocUnsafeSlow(
Math.min(constants.MAX_LENGTH, remaining)
Math.min(
constants.MAX_LENGTH,
remaining,
decompression ? DECOMPRESSION_CHUNK_SIZE : Infinity
)
);
currentBufferUsed = 0;
}
@@ -509,9 +578,16 @@
currentBufferUsed += bytesRead;
remaining -= bytesRead;
if (currentBufferUsed === currentBuffer.length) {
buf.push(currentBuffer);
if (decompression) {
decompression.write(currentBuffer);
} else {
buf.push(currentBuffer);
}
currentBuffer = undefined;
if (remaining === 0) {
if (decompression) {
decompression.end();
}
this.fs.close(fd, err => {
if (err) {
reject(err);
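
On the read side, the file is pulled in at most DECOMPRESSION_CHUNK_SIZE bytes at a time and written into the matching decompressor; the decompressed chunks become the buffer list handed to the deserializer, and the outer promise only settles once both the file read and the decompression stream have ended. A simplified standalone sketch of that flow:

// Sketch: stream a compressed cache file back into an array of Buffers
const fs = require("fs");
const { createGunzip, createBrotliDecompress } = require("zlib");

function readCompressed(file, callback) {
  const decompression = file.endsWith(".br")
    ? createBrotliDecompress()
    : createGunzip();
  const buf = [];
  decompression.on("data", chunk => buf.push(chunk));
  decompression.on("end", () => callback(null, buf));
  decompression.on("error", err => callback(err));
  fs.createReadStream(file).pipe(decompression);
}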

File diff suppressed because one or more lines are too long


@@ -956,6 +956,10 @@
"type": "string",
"absolutePath": true
},
"compression": {
"description": "Compression type used for the cache files.",
"enum": [false, "gzip", "brotli"]
},
"hashAlgorithm": {
"description": "Algorithm used for generating the hash (see Node.js crypto package).",
"type": "string"

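Because the option is an enum in the schema, an unsupported value fails fast at startup. A sketch of that behavior, assuming webpack 5's exported validate helper:

// "zstd" is not in the enum, so this should throw a ValidationError
const { validate } = require("webpack");
validate({ cache: { type: "filesystem", compression: "zstd" } });
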

@@ -1509,6 +1509,7 @@ describe("Defaults", () => {
+ },
+ "cacheDirectory": "<cwd>/node_modules/.cache/webpack",
+ "cacheLocation": "<cwd>/node_modules/.cache/webpack/default-none",
+ "compression": "gzip",
+ "hashAlgorithm": "md4",
+ "idleTimeout": 60000,
+ "idleTimeoutAfterLargeChanges": 1000,
@@ -1551,6 +1552,7 @@ describe("Defaults", () => {
+ },
+ "cacheDirectory": "<cwd>/node_modules/.cache/webpack",
+ "cacheLocation": "<cwd>/node_modules/.cache/webpack/default-development",
+ "compression": false,
+ "hashAlgorithm": "md4",
+ "idleTimeout": 60000,
+ "idleTimeoutAfterLargeChanges": 1000,
@@ -1798,6 +1800,7 @@ describe("Defaults", () => {
+ },
+ "cacheDirectory": "<cwd>/node_modules/.cache/webpack",
+ "cacheLocation": "<cwd>/node_modules/.cache/webpack/default-none",
+ "compression": "gzip",
+ "hashAlgorithm": "md4",
+ "idleTimeout": 60000,
+ "idleTimeoutAfterLargeChanges": 1000,


@@ -95,6 +95,24 @@ Object {
"multiple": false,
"simpleType": "string",
},
"cache-compression": Object {
"configs": Array [
Object {
"description": "Compression type used for the cache files.",
"multiple": false,
"path": "cache.compression",
"type": "enum",
"values": Array [
false,
"gzip",
"brotli",
],
},
],
"description": "Compression type used for the cache files.",
"multiple": false,
"simpleType": "string",
},
"cache-hash-algorithm": Object {
"configs": Array [
Object {
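
webpack-cli derives a --cache-compression flag from this path, so the same choice is presumably available on the command line, e.g.:

npx webpack --cache-compression brotli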

types.d.ts (vendored)

@@ -3818,6 +3818,11 @@ declare interface FileCacheOptions {
*/
cacheLocation?: string;
/**
* Compression type used for the cache files.
*/
compression?: false | "gzip" | "brotli";
/**
* Algorithm used for generating the hash (see Node.js crypto package).
*/