Merge pull request #11140 from webpack/bugfix/avoid-modify-hash

fix HMR bugs with hashing
Tobias Koppers 2020-07-09 10:06:49 +02:00 committed by GitHub
commit f874ac290d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 125 additions and 88 deletions

View File

@@ -2416,7 +2416,6 @@ Make sure to select an appropriate stage from Compilation.PROCESS_ASSETS_STAGE_*
if (outputOptions.hashSalt) {
hash.update(outputOptions.hashSalt);
}
this.hooks.fullHash.call(hash);
this.logger.timeEnd("hashing: initialize hash");
if (this.children.length > 0) {
this.logger.time("hashing: hash child compilations");
@@ -2510,6 +2509,7 @@ Make sure to select an appropriate stage from Compilation.PROCESS_ASSETS_STAGE_*
this.logger.timeAggregateEnd("hashing: hash runtime modules");
this.logger.timeAggregateEnd("hashing: hash chunks");
this.logger.time("hashing: hash digest");
this.hooks.fullHash.call(hash);
this.fullHash = /** @type {string} */ (hash.digest(hashDigest));
this.hash = this.fullHash.substr(0, hashDigestLength);
this.logger.timeEnd("hashing: hash digest");
@@ -2541,22 +2541,6 @@ Make sure to select an appropriate stage from Compilation.PROCESS_ASSETS_STAGE_*
this.logger.timeEnd("hashing: process full hash modules");
}
/**
* @param {string} update extra information
* @returns {void}
*/
modifyHash(update) {
const outputOptions = this.outputOptions;
const hashFunction = outputOptions.hashFunction;
const hashDigest = outputOptions.hashDigest;
const hashDigestLength = outputOptions.hashDigestLength;
const hash = createHash(hashFunction);
hash.update(this.fullHash);
hash.update(update);
this.fullHash = /** @type {string} */ (hash.digest(hashDigest));
this.hash = this.fullHash.substr(0, hashDigestLength);
}
/**
* @param {string} file file name
* @param {Source} source asset source
@@ -2944,6 +2928,18 @@ Make sure to select an appropriate stage from Compilation.PROCESS_ASSETS_STAGE_*
}
}
// TODO webpack 6 remove
Object.defineProperty(Compilation.prototype, "modifyHash", {
writable: false,
enumerable: false,
configurable: false,
value: () => {
throw new Error(
"Compilation.modifyHash was removed in favor of Compilation.hooks.fullHash"
);
}
});
/**
* Add additional assets to the compilation.
*/
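
For plugin authors who hit the new error, the migration is to contribute extra data while the hash is still being built instead of re-hashing the finished digest. A minimal sketch (the plugin name and salt string are made up for illustration; compilation.hooks.fullHash now runs right before the digest is computed, as shown above):

// Hypothetical migration from the removed compilation.modifyHash(update)
// to tapping compilation.hooks.fullHash.
class ExtraHashSaltPlugin {
	apply(compiler) {
		compiler.hooks.compilation.tap("ExtraHashSaltPlugin", compilation => {
			compilation.hooks.fullHash.tap("ExtraHashSaltPlugin", hash => {
				// Previously: compilation.modifyHash("extra-salt");
				hash.update("extra-salt");
			});
		});
	}
}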

View File

@@ -64,8 +64,6 @@ class HotModuleReplacementPlugin {
constructor(options) {
this.options = options || {};
this.multiStep = this.options.multiStep;
this.fullBuildTimeout = this.options.fullBuildTimeout || 200;
}
/**
@@ -74,15 +72,6 @@ class HotModuleReplacementPlugin {
* @returns {void}
*/
apply(compiler) {
const multiStep = this.multiStep;
const fullBuildTimeout = this.fullBuildTimeout;
compiler.hooks.additionalPass.tapAsync(
"HotModuleReplacementPlugin",
callback => {
if (multiStep) return setTimeout(callback, fullBuildTimeout);
return callback();
}
);
const runtimeRequirements = [RuntimeGlobals.module];
const createAcceptHandler = (parser, ParamDependency) => {
@@ -293,19 +282,19 @@ class HotModuleReplacementPlugin {
);
//#endregion
let hotIndex = 0;
const fullHashModuleHashes = {};
const moduleHashes = {};
compilation.hooks.record.tap(
"HotModuleReplacementPlugin",
(compilation, records) => {
if (records.hash === compilation.hash) return;
const chunkGraph = compilation.chunkGraph;
records.hash = compilation.hash;
records.moduleHashs = {};
for (const module of compilation.modules) {
const identifier = module.identifier();
records.moduleHashs[identifier] = chunkGraph.getModuleHash(
module
);
}
records.hotIndex = hotIndex;
records.fullHashModuleHashes = fullHashModuleHashes;
records.moduleHashes = moduleHashes;
records.chunkHashs = {};
for (const chunk of compilation.chunks) {
records.chunkHashs[chunk.id] = chunk.hash;
@@ -324,38 +313,53 @@ class HotModuleReplacementPlugin {
}
}
);
let initialPass = false;
let recompilation = false;
compilation.hooks.afterHash.tap("HotModuleReplacementPlugin", () => {
let records = compilation.records;
if (!records) {
initialPass = true;
return;
/** @type {Set<Module>} */
const updatedModules = new Set();
/** @type {Set<Module>} */
const lazyHashedModules = new Set();
compilation.hooks.fullHash.tap("HotModuleReplacementPlugin", hash => {
const chunkGraph = compilation.chunkGraph;
const records = compilation.records;
for (const chunk of compilation.chunks) {
const modules = chunkGraph.getChunkFullHashModulesIterable(chunk);
if (modules !== undefined) {
for (const module of modules) {
lazyHashedModules.add(module);
}
}
}
if (!records.hash) initialPass = true;
const preHash = records.preHash || "x";
const prePreHash = records.prePreHash || "x";
if (preHash === compilation.hash) {
recompilation = true;
compilation.modifyHash(prePreHash);
return;
if (records.moduleHashes && records.fullHashModuleHashes) {
for (const module of compilation.modules) {
const identifier = module.identifier();
const hash = chunkGraph.getModuleHash(module);
if (lazyHashedModules.has(module)) {
if (records.fullHashModuleHashes[identifier] !== hash) {
updatedModules.add(module);
}
fullHashModuleHashes[identifier] = hash;
} else {
if (records.moduleHashes[identifier] !== hash) {
updatedModules.add(module);
}
moduleHashes[identifier] = hash;
}
}
} else {
for (const module of compilation.modules) {
const identifier = module.identifier();
const hash = chunkGraph.getModuleHash(module);
if (lazyHashedModules.has(module)) {
fullHashModuleHashes[identifier] = hash;
} else {
moduleHashes[identifier] = hash;
}
}
}
records.prePreHash = records.hash || "x";
records.preHash = compilation.hash;
compilation.modifyHash(records.prePreHash);
hotIndex = records.hotIndex || 0;
if (updatedModules.size > 0) hotIndex++;
hash.update(`${hotIndex}`);
});
compilation.hooks.shouldGenerateChunkAssets.tap(
"HotModuleReplacementPlugin",
() => {
if (multiStep && !recompilation && !initialPass) return false;
}
);
compilation.hooks.needAdditionalPass.tap(
"HotModuleReplacementPlugin",
() => {
if (multiStep && !recompilation && !initialPass) return true;
}
);
compilation.hooks.processAssets.tap(
{
name: "HotModuleReplacementPlugin",
@@ -366,19 +370,19 @@ class HotModuleReplacementPlugin {
const records = compilation.records;
if (records.hash === compilation.hash) return;
if (
!records.moduleHashs ||
!records.moduleHashes ||
!records.chunkHashs ||
!records.chunkModuleIds
)
) {
return;
/** @type {Set<Module>} */
const updatedModules = new Set();
for (const module of compilation.modules) {
}
for (const module of lazyHashedModules) {
const identifier = module.identifier();
const hash = chunkGraph.getModuleHash(module);
if (records.moduleHashs[identifier] !== hash) {
if (records.moduleHashes[identifier] !== hash) {
updatedModules.add(module);
}
moduleHashes[identifier] = hash;
}
const hotUpdateMainContent = {
c: [],
@@ -417,7 +421,11 @@ class HotModuleReplacementPlugin {
const removedModules = records.chunkModuleIds[chunkId].filter(
id => !allModules.has(id)
);
if (newModules.length > 0 || removedModules.length > 0) {
if (
newModules.length > 0 ||
newRuntimeModules.length > 0 ||
removedModules.length > 0
) {
const hotUpdateChunk = new HotUpdateChunk();
ChunkGraph.setChunkGraphForChunk(hotUpdateChunk, chunkGraph);
hotUpdateChunk.id = chunkId;
@@ -429,7 +437,7 @@ class HotModuleReplacementPlugin {
if (newFullHashModules) {
chunkGraph.attachFullHashModules(
hotUpdateChunk,
newRuntimeModules
newFullHashModules
);
}
hotUpdateChunk.removedModules = removedModules;

View File

@@ -1,13 +0,0 @@
const { describeCases } = require("./TestCases.template");
const webpack = require("..");
describe("TestCases", () => {
describeCases({
name: "hot-multi-step",
plugins: [
new webpack.HotModuleReplacementPlugin({
multiStep: true
})
]
});
});
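
With the multiStep and fullBuildTimeout options removed (see the constructor diff above and this deleted test case), the new constructor suggests they are now silently ignored, so existing configurations can simply drop them. A hedged before/after sketch:

// Before this change (multi-step HMR builds):
//   new webpack.HotModuleReplacementPlugin({ multiStep: true, fullBuildTimeout: 200 })
// After this change a plain instance is enough; hot updates are produced in a single pass:
const webpack = require("webpack");

module.exports = {
	// ...remaining configuration unchanged
	plugins: [new webpack.HotModuleReplacementPlugin()]
};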

View File

@@ -12,6 +12,7 @@ it("should generate the main file and change full hash on update", done => {
module.hot.accept("./module", () => {
const hash2 = __webpack_hash__;
expect(hash1).toBeTypeOf("string");
expect(hash2).toBeTypeOf("string");
expect(hash2).not.toBe(hash1);
expect(getFile("bundle.js")).toContain(hash2);
expect(getFile("bundle.js")).not.toContain(hash1);

View File

@@ -0,0 +1,3 @@
module.exports = [
[/Module parse failed/]
]

View File

@@ -0,0 +1,29 @@
import "./module";
const getFile = name =>
__non_webpack_require__("fs").readFileSync(
__non_webpack_require__("path").join(__dirname, name),
"utf-8"
);
it("should generate the main file and change full hash on update", done => {
const hash1 = __webpack_hash__;
expect(getFile("bundle.js")).toContain(hash1);
import.meta.webpackHot.accept("./module", () => {
const hash2 = __webpack_hash__;
expect(hash1).toBeTypeOf("string");
expect(hash2).toBeTypeOf("string");
expect(hash2).not.toBe(hash1);
expect(getFile("bundle.js")).toContain(hash2);
expect(getFile("bundle.js")).not.toContain(hash1);
done();
});
NEXT(err => {
if (err) return done(err);
NEXT((err, stats) => {
if (err) return done(err);
expect(stats.hash).toBe(hash1);
NEXT(require("../../update")(done));
});
});
});

View File

@@ -0,0 +1 @@
import("./thing");

View File

@@ -0,0 +1,7 @@
export default 1;
---
export default <<<<<<;
---
export default 1;
---
export default 2;

View File

@@ -0,0 +1,8 @@
module.exports = {
node: {
__dirname: false
},
optimization: {
noEmitOnErrors: true
}
};

types.d.ts vendored
View File

@@ -1174,7 +1174,6 @@ declare class Compilation {
createHash(): void;
fullHash: string;
hash: string;
modifyHash(update: string): void;
emitAsset(file: string, source: Source, assetInfo?: AssetInfo): void;
updateAsset(
file: string,
@@ -2857,8 +2856,6 @@ declare interface HashedModuleIdsPluginOptions {
declare class HotModuleReplacementPlugin {
constructor(options?: any);
options: any;
multiStep: any;
fullBuildTimeout: any;
/**
* Apply the plugin