Merge remote-tracking branch 'origin/next' into next

Tobias Koppers 2019-08-13 21:47:24 +02:00
commit 03ffdc56e6
32 changed files with 1267 additions and 153 deletions

declarations.d.ts

@@ -8,6 +8,12 @@ declare namespace NodeJS {
}
declare module "neo-async" {
interface QueueObject<T, E> {
push(item: T): void;
drain: () => void;
error: (err: E) => void;
}
export interface Dictionary<T> {
[key: string]: T;
}
@@ -104,6 +110,11 @@ declare module "neo-async" {
callback?: AsyncResultObjectCallback<T, E>
): void;
export function queue<T, E>(
worker: AsyncFunction<T, E>,
concurrency?: number
): QueueObject<T, E>;
export const forEach: typeof each;
export const forEachLimit: typeof eachLimit;
}
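The queue typings added above mirror how `FileSystemInfo` consumes neo-async later in this commit. A minimal consumer sketch under these typings — the worker, item shape, and paths are illustrative, not part of the commit:

```javascript
const asyncLib = require("neo-async");

// QueueObject<T, E> with T = { path: string } and E = Error
const queue = asyncLib.queue(({ path }, callback) => {
	// do some async work per item, then signal completion
	setTimeout(() => callback(), 0);
}, 2); // concurrency: 2

queue.drain = () => console.log("all items processed");
queue.error = err => console.error("a worker failed:", err);
queue.push({ path: "/some/file.js" });
```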


@@ -441,6 +441,10 @@ export interface WebpackOptions {
* via the `definition` "MemoryCacheOptions".
*/
export interface MemoryCacheOptions {
/**
* List of paths that are managed by a package manager and can be trusted not to be modified otherwise
*/
managedPaths?: string[];
/**
* In memory caching
*/
@@ -451,6 +455,15 @@ export interface MemoryCacheOptions {
* via the `definition` "FileCacheOptions".
*/
export interface FileCacheOptions {
/**
* Dependencies the build depends on (in multiple categories; default category: 'defaultWebpack')
*/
buildDependencies?: {
/**
* List of dependencies the build depends on
*/
[k: string]: string[];
};
/**
* Base directory for the cache (defaults to node_modules/.cache/webpack).
*/
@@ -471,6 +484,10 @@ export interface FileCacheOptions {
* Time in ms after which idle period the initial cache storing should happen (only for store: 'pack' or 'idle')
*/
idleTimeoutForInitialStore?: number;
/**
* List of paths that are managed by a package manager and can be trusted not to be modified otherwise
*/
managedPaths?: string[];
/**
* Name for the cache. Different names will lead to different coexisting caches.
*/
@@ -940,7 +957,7 @@ export interface OptimizationSplitChunksOptions {
*/
automaticNameDelimiter?: string;
/**
* Assign modules to a cache group (modules from different cache groups are tried to keep in separate chunks)
* Assign modules to a cache group (webpack tries to keep modules from different cache groups in separate chunks; default groups: 'default', 'defaultVendors')
*/
cacheGroups?: {
/**


@@ -3,6 +3,7 @@
```javascript
console.log(process.env.NODE_ENV);
import "./example.css";
import "react";
import "react-dom";
import "acorn";
@@ -17,13 +18,31 @@ const path = require("path");
module.exports = (env = "development") => ({
mode: env,
infrastructureLogging: {
// Optional: print more verbose logging about caching
level: "verbose"
},
cache: {
type: "filesystem",
// changing the cacheDirectory is optional,
// by default it will be in `node_modules/.cache`
cacheDirectory: path.resolve(__dirname, ".cache")
cacheDirectory: path.resolve(__dirname, ".cache"),
// Add additional dependencies to the build
buildDependencies: {
// recommended so that the cache is invalidated on config changes
// This also makes all dependencies of this file build dependencies
config: [__filename]
// By default webpack and loaders are build dependencies
}
},
module: {
rules: [
{
test: /\.css$/,
use: ["style-loader", "css-loader"]
}
]
}
});
```
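Any key under `buildDependencies` behaves like `config` above: each category contributes its own entry list, and webpack adds a `defaultWebpack` category automatically (see the defaulter change further down). A hedged variation — the `tooling` category and `babel.config.js` path are purely illustrative:

```javascript
cache: {
	type: "filesystem",
	buildDependencies: {
		config: [__filename],
		// hypothetical extra category; entries are resolved like requests,
		// and entries ending in a slash are resolved as directories
		tooling: [path.resolve(__dirname, "babel.config.js")]
	}
}
```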
@@ -33,10 +52,10 @@ module.exports = (env = "development") => ({
```
Hash: 0a1b2c3d4e5f6a7b8c9d
Version: webpack 5.0.0-alpha.19
Asset Size Chunks Chunk Names
output.js 1.78 MiB {0} [emitted] main
Asset Size Chunks Chunk Names
output.js 1.8 MiB {0} [emitted] main
Entrypoint main = output.js
chunk {0} output.js (main) 1.54 MiB (javascript) 1.07 KiB (runtime) [entry]
chunk {0} output.js (main) 1.55 MiB (javascript) 1.07 KiB (runtime) [entry]
> ./example.js main
526 chunk modules
530 chunk modules
```


@@ -0,0 +1,3 @@
body {
background: red;
}


@@ -1,5 +1,6 @@
console.log(process.env.NODE_ENV);
import "./example.css";
import "react";
import "react-dom";
import "acorn";


@@ -2,12 +2,30 @@ const path = require("path");
module.exports = (env = "development") => ({
mode: env,
infrastructureLogging: {
// Optional: print more verbose logging about caching
level: "verbose"
},
cache: {
type: "filesystem",
// changing the cacheDirectory is optional,
// by default it will be in `node_modules/.cache`
cacheDirectory: path.resolve(__dirname, ".cache")
cacheDirectory: path.resolve(__dirname, ".cache"),
// Add additional dependencies to the build
buildDependencies: {
// recommended so that the cache is invalidated on config changes
// This also makes all dependencies of this file build dependencies
config: [__filename]
// By default webpack and loaders are build dependencies
}
},
module: {
rules: [
{
test: /\.css$/,
use: ["style-loader", "css-loader"]
}
]
}
});


@@ -6,7 +6,10 @@
"use strict";
const { AsyncParallelHook, AsyncSeriesBailHook, SyncHook } = require("tapable");
const { makeWebpackError } = require("./HookWebpackError");
const {
makeWebpackError,
makeWebpackErrorCallback
} = require("./HookWebpackError");
/** @typedef {import("./WebpackError")} WebpackError */
@@ -44,6 +47,8 @@ class Cache {
get: new AsyncSeriesBailHook(["identifier", "etag", "gotHandlers"]),
/** @type {AsyncParallelHook<[string, string, any]>} */
store: new AsyncParallelHook(["identifier", "etag", "data"]),
/** @type {AsyncParallelHook<[Iterable<string>]>} */
storeBuildDependencies: new AsyncParallelHook(["dependencies"]),
/** @type {SyncHook<[]>} */
beginIdle: new SyncHook([]),
/** @type {AsyncParallelHook<[]>} */
@@ -91,7 +96,25 @@ class Cache {
* @returns {void}
*/
store(identifier, etag, data, callback) {
this.hooks.store.callAsync(identifier, etag, data, callback);
this.hooks.store.callAsync(
identifier,
etag,
data,
makeWebpackErrorCallback(callback, "Cache.hooks.store")
);
}
/**
* After this method has succeeded, the cache can only be restored when the build dependencies are unchanged
* @param {Iterable<string>} dependencies list of all build dependencies
* @param {Callback<void>} callback signals when the dependencies are stored
* @returns {void}
*/
storeBuildDependencies(dependencies, callback) {
this.hooks.storeBuildDependencies.callAsync(
dependencies,
makeWebpackErrorCallback(callback, "Cache.hooks.storeBuildDependencies")
);
}
/**
@@ -106,7 +129,9 @@
* @returns {void}
*/
endIdle(callback) {
this.hooks.endIdle.callAsync(callback);
this.hooks.endIdle.callAsync(
makeWebpackErrorCallback(callback, "Cache.hooks.endIdle")
);
}
/**
@@ -114,7 +139,9 @@
* @returns {void}
*/
shutdown(callback) {
this.hooks.shutdown.callAsync(callback);
this.hooks.shutdown.callAsync(
makeWebpackErrorCallback(callback, "Cache.hooks.shutdown")
);
}
}
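Cache strategies receive the dependencies by tapping the new hook. A minimal sketch of such a tap — the plugin name is illustrative, and `IdleFileCachePlugin` further down does the real version:

```javascript
const Cache = require("webpack/lib/Cache"); // require path assumed for illustration

class LogBuildDependenciesPlugin {
	apply(compiler) {
		compiler.cache.hooks.storeBuildDependencies.tap(
			{ name: "LogBuildDependenciesPlugin", stage: Cache.STAGE_DISK },
			dependencies => {
				// dependencies is an Iterable<string> of resolved build dependencies
				for (const dep of dependencies) console.log("build dependency:", dep);
			}
		);
	}
}
```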


@@ -414,7 +414,9 @@ class Compilation {
this.compiler = compiler;
this.resolverFactory = compiler.resolverFactory;
this.inputFileSystem = compiler.inputFileSystem;
this.fileSystemInfo = new FileSystemInfo(this.inputFileSystem);
this.fileSystemInfo = new FileSystemInfo(this.inputFileSystem, {
managedPaths: compiler.managedPaths
});
if (compiler.fileTimestamps) {
this.fileSystemInfo.addFileTimestamps(compiler.fileTimestamps);
}
@@ -535,6 +537,8 @@ class Compilation {
this.contextDependencies = new LazySet();
/** @type {LazySet<string>} */
this.missingDependencies = new LazySet();
/** @type {LazySet<string>} */
this.buildDependencies = new LazySet();
}
getStats() {
@@ -1421,7 +1425,12 @@ class Compilation {
this.unseal();
return this.seal(callback);
}
return this.hooks.afterSeal.callAsync(callback);
this.cache.storeBuildDependencies(this.buildDependencies, err => {
if (err) {
return callback(err);
}
return this.hooks.afterSeal.callAsync(callback);
});
});
});
});
@@ -1837,6 +1846,7 @@ class Compilation {
this.fileDependencies.addAll(child.fileDependencies);
this.contextDependencies.addAll(child.contextDependencies);
this.missingDependencies.addAll(child.missingDependencies);
this.buildDependencies.addAll(child.buildDependencies);
}
for (const module of this.modules) {


@@ -176,6 +176,8 @@ class Compiler {
this.recordsOutputPath = null;
this.records = {};
/** @type {Set<string>} */
this.managedPaths = new Set();
/** @type {Set<string>} */
this.removedFiles = new Set();
/** @type {Map<string, FileSystemInfoEntry | null>} */
this.fileTimestamps = new Map();


@@ -5,13 +5,28 @@
"use strict";
const resolve = require("enhanced-resolve");
const asyncLib = require("neo-async");
const AsyncQueue = require("./util/AsyncQueue");
const createHash = require("./util/createHash");
const { join, dirname } = require("./util/fs");
/** @typedef {import("./WebpackError")} WebpackError */
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */
const resolveContext = resolve.create({
resolveToContext: true
});
let FS_ACCURACY = 2000;
const RBDT_RESOLVE = 0;
const RBDT_RESOLVE_DIRECTORY = 1;
const RBDT_DIRECTORY = 2;
const RBDT_FILE = 3;
const RBDT_DIRECTORY_DEPENDENCIES = 4;
const RBDT_FILE_DEPENDENCIES = 5;
/**
* @typedef {Object} FileSystemInfoEntry
* @property {number} safeTime
@@ -20,10 +35,13 @@ let FS_ACCURACY = 2000;
/**
* @typedef {Object} Snapshot
* @property {number} startTime
* @property {Map<string, FileSystemInfoEntry | "error">} fileTimestamps
* @property {Map<string, FileSystemInfoEntry | "error">} contextTimestamps
* @property {Map<string, FileSystemInfoEntry | "error">} missingTimestamps
* @property {number=} startTime
* @property {Map<string, FileSystemInfoEntry | "error">=} fileTimestamps
* @property {Map<string, string | "error">=} fileHashes
* @property {Map<string, FileSystemInfoEntry | "error">=} contextTimestamps
* @property {Map<string, string | "error">=} contextHashes
* @property {Map<string, FileSystemInfoEntry | "error">=} missingTimestamps
* @property {Map<string, string | "error">=} managedItemInfo
*/
/* istanbul ignore next */
@@ -34,26 +52,102 @@ const applyMtime = mtime => {
else if (FS_ACCURACY > 1000 && mtime % 2000 !== 0) FS_ACCURACY = 1000;
};
const mergeMaps = (a, b) => {
if (!b || b.size === 0) return a;
if (!a || a.size === 0) return b;
const map = new Map(a);
for (const [key, value] of b) {
map.set(key, value);
}
return map;
};
const getManagedItem = (managedPath, path) => {
let i = managedPath.length;
let slashes = 1;
loop: while (i < path.length) {
switch (path.charCodeAt(i)) {
case 47: // slash
case 92: // backslash
if (--slashes === 0) break loop;
break;
case 64: // @
slashes++;
break;
}
i++;
}
// if (path.slice(i + 1, i + 13) === "node_modules")
if (
path.charCodeAt(i + 1) === 110 &&
path.charCodeAt(i + 2) === 111 &&
path.charCodeAt(i + 3) === 100 &&
path.charCodeAt(i + 4) === 101 &&
path.charCodeAt(i + 5) === 95 &&
path.charCodeAt(i + 6) === 109 &&
path.charCodeAt(i + 7) === 111 &&
path.charCodeAt(i + 8) === 100 &&
path.charCodeAt(i + 9) === 117 &&
path.charCodeAt(i + 10) === 108 &&
path.charCodeAt(i + 11) === 101 &&
path.charCodeAt(i + 12) === 115
) {
const c = path.charCodeAt(i + 13);
if (c === 47 || c === 92) {
// Managed subpath
return getManagedItem(path.slice(0, i + 14), path);
}
}
return path.slice(0, i);
};
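In effect, `getManagedItem` skips one path segment past the managed path (two segments for scoped `@scope/name` packages, which is what the `@` case incrementing `slashes` handles) and recurses into nested `node_modules` directories. Illustrative inputs and outputs (all paths made up):

```javascript
getManagedItem("/proj/node_modules/", "/proj/node_modules/lodash/index.js");
// -> "/proj/node_modules/lodash"

getManagedItem("/proj/node_modules/", "/proj/node_modules/@babel/core/lib/index.js");
// -> "/proj/node_modules/@babel/core"

getManagedItem("/proj/node_modules/", "/proj/node_modules/foo/node_modules/bar/x.js");
// nested node_modules: recurses and returns "/proj/node_modules/foo/node_modules/bar"
```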
class FileSystemInfo {
/**
* @param {InputFileSystem} fs file system
* @param {Object} options options
* @param {Iterable<string>=} options.managedPaths paths that are only managed by a package manager
*/
constructor(fs) {
constructor(fs, { managedPaths = [] } = {}) {
this.fs = fs;
/** @type {Map<string, FileSystemInfoEntry | null>} */
this._fileTimestamps = new Map();
/** @type {Map<string, string>} */
this._fileHashes = new Map();
/** @type {Map<string, FileSystemInfoEntry | null>} */
this._contextTimestamps = new Map();
/** @type {Map<string, string>} */
this._contextHashes = new Map();
/** @type {Map<string, string>} */
this._managedItems = new Map();
this.fileTimestampQueue = new AsyncQueue({
name: "file timestamp",
parallelism: 30,
processor: this._readFileTimestamp.bind(this)
});
this.fileHashQueue = new AsyncQueue({
name: "file hash",
parallelism: 10,
processor: this._readFileHash.bind(this)
});
this.contextTimestampQueue = new AsyncQueue({
name: "context timestamp",
parallelism: 2,
processor: this._readContextTimestamp.bind(this)
});
this.contextHashQueue = new AsyncQueue({
name: "context hash",
parallelism: 2,
processor: this._readContextHash.bind(this)
});
this.managedItemQueue = new AsyncQueue({
name: "managed item info",
parallelism: 10,
processor: this._getManagedItemInfo.bind(this)
});
this.managedPaths = Array.from(managedPaths);
this.managedPathsWithSlash = this.managedPaths.map(p =>
join(fs, p, "_").slice(0, -1)
);
}
/**
@@ -98,6 +192,195 @@ class FileSystemInfo {
this.contextTimestampQueue.add(path, callback);
}
/**
* @param {string} path file path
* @param {function(WebpackError=, string=): void} callback callback function
* @returns {void}
*/
getFileHash(path, callback) {
const cache = this._fileHashes.get(path);
if (cache !== undefined) return callback(null, cache);
this.fileHashQueue.add(path, callback);
}
/**
* @param {string} path context path
* @param {function(WebpackError=, string=): void} callback callback function
* @returns {void}
*/
getContextHash(path, callback) {
const cache = this._contextHashes.get(path);
if (cache !== undefined) return callback(null, cache);
this.contextHashQueue.add(path, callback);
}
resolveBuildDependencies(context, deps, callback) {
const files = new Set();
const directories = new Set();
const missing = new Set();
/** @type {asyncLib.QueueObject<{type: number, path: string, context?: string }, Error>} */
const queue = asyncLib.queue(({ type, context, path }, callback) => {
switch (type) {
case RBDT_RESOLVE: {
const isDirectory = /[\\/]$/.test(path);
const isDeps = /^deps:/.test(path);
if (isDeps) path = path.slice(5);
if (isDirectory) {
resolveContext(
context,
path.replace(/[\\/]$/, ""),
(err, result) => {
if (err) return callback(err);
queue.push({
type: isDeps ? RBDT_DIRECTORY_DEPENDENCIES : RBDT_DIRECTORY,
path: result
});
callback();
}
);
} else {
resolve(context, path, (err, result) => {
if (err) return callback(err);
queue.push({
type: isDeps ? RBDT_FILE_DEPENDENCIES : RBDT_FILE,
path: result
});
callback();
});
}
break;
}
case RBDT_RESOLVE_DIRECTORY: {
resolveContext(context, path, (err, result) => {
if (err) return callback(err);
queue.push({
type: RBDT_DIRECTORY,
path: result
});
callback();
});
break;
}
case RBDT_FILE: {
if (files.has(path)) {
callback();
break;
}
this.fs.realpath(path, (err, realPath) => {
if (err) return callback(err);
if (!files.has(realPath)) {
files.add(realPath);
queue.push({
type: RBDT_FILE_DEPENDENCIES,
path: realPath
});
}
callback();
});
break;
}
case RBDT_DIRECTORY: {
if (directories.has(path)) {
callback();
break;
}
this.fs.realpath(path, (err, realPath) => {
if (err) return callback(err);
if (!directories.has(realPath)) {
directories.add(realPath);
queue.push({
type: RBDT_DIRECTORY_DEPENDENCIES,
path: realPath
});
}
callback();
});
break;
}
case RBDT_FILE_DEPENDENCIES: {
/** @type {NodeModule} */
const module = require.cache[path];
if (module && Array.isArray(module.children)) {
for (const child of module.children) {
if (child.id) {
queue.push({
type: RBDT_FILE,
path: child.id
});
}
}
} else {
// Unable to get dependencies from module system
// This may be because of an incomplete require.cache implementation like in jest
// Assume requires stay in directory and add the whole directory
const directory = dirname(this.fs, path);
queue.push({
type: RBDT_DIRECTORY,
path: directory
});
}
callback();
break;
}
case RBDT_DIRECTORY_DEPENDENCIES: {
const match = /(^.+[\\/]node_modules[\\/](?:@[^\\/]+[\\/])?[^\\/]+)/.exec(
path
);
const packagePath = match ? match[1] : path;
const packageJson = join(this.fs, packagePath, "package.json");
this.fs.readFile(packageJson, (err, content) => {
if (err) {
if (err.code === "ENOENT") {
const parent = dirname(this.fs, packagePath);
if (parent !== packagePath) {
queue.push({
type: RBDT_DIRECTORY_DEPENDENCIES,
path: parent
});
}
callback();
return;
}
return callback(err);
}
let packageData;
try {
packageData = JSON.parse(content.toString("utf-8"));
} catch (e) {
return callback(e);
}
const depsObject = packageData.dependencies;
if (typeof depsObject === "object" && depsObject) {
for (const dep of Object.keys(depsObject)) {
queue.push({
type: RBDT_RESOLVE_DIRECTORY,
context: packagePath,
path: dep
});
}
}
callback();
});
break;
}
}
}, 50);
queue.drain = () => {
callback(null, { files, directories, missing });
};
queue.error = err => {
callback(err);
callback = () => {};
};
for (const dep of deps) {
queue.push({
type: RBDT_RESOLVE,
context,
path: dep
});
}
}
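`resolveBuildDependencies` thus expands the configured requests — following resolved files, their `require.cache` children, and `package.json` dependencies — into the concrete file/directory/missing sets that a snapshot must cover. A hedged usage sketch; the fs module, require path, and request are assumptions, and any `InputFileSystem` providing `realpath` should work:

```javascript
const fs = require("graceful-fs");
const FileSystemInfo = require("webpack/lib/FileSystemInfo"); // path assumed

const info = new FileSystemInfo(fs);
info.resolveBuildDependencies(
	process.cwd(),            // context used to resolve the requests
	["./webpack.config.js"],  // a trailing "/" would mark a directory request
	(err, result) => {
		if (err) throw err;
		const { files, directories, missing } = result;
		console.log(files.size, "files,", directories.size, "directories");
	}
);
```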
/**
*
* @param {number} startTime when processing the files has started
@@ -111,47 +394,130 @@ class FileSystemInfo {
createSnapshot(startTime, files, directories, missing, options, callback) {
/** @type {Map<string, FileSystemInfoEntry | "error">} */
const fileTimestamps = new Map();
/** @type {Map<string, string | "error">} */
const fileHashes = new Map();
/** @type {Map<string, FileSystemInfoEntry | "error">} */
const contextTimestamps = new Map();
/** @type {Map<string, string | "error">} */
const contextHashes = new Map();
/** @type {Map<string, FileSystemInfoEntry | "error">} */
const missingTimestamps = new Map();
/** @type {Map<string, string | "error">} */
const managedItemInfo = new Map();
const managedItems = new Set();
let jobs = 1;
const jobDone = () => {
if (--jobs === 0) {
callback(null, {
startTime,
fileTimestamps,
contextTimestamps,
missingTimestamps
});
const snapshot = {};
if (startTime) snapshot.startTime = startTime;
if (fileTimestamps.size !== 0) snapshot.fileTimestamps = fileTimestamps;
if (fileHashes.size !== 0) snapshot.fileHashes = fileHashes;
if (contextTimestamps.size !== 0)
snapshot.contextTimestamps = contextTimestamps;
if (contextHashes.size !== 0) snapshot.contextHashes = contextHashes;
if (missingTimestamps.size !== 0)
snapshot.missingTimestamps = missingTimestamps;
if (managedItemInfo.size !== 0)
snapshot.managedItemInfo = managedItemInfo;
callback(null, snapshot);
}
};
if (files) {
for (const path of files) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
fileTimestamps.set(path, cache);
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) {
fileTimestamps.set(path, "error");
} else {
fileTimestamps.set(path, entry);
if (options && options.hash) {
files: for (const path of files) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue files;
}
jobDone();
});
}
const cache = this._fileHashes.get(path);
if (cache !== undefined) {
fileHashes.set(path, cache);
} else {
jobs++;
this.fileHashQueue.add(path, (err, entry) => {
if (err) {
fileHashes.set(path, "error");
} else {
fileHashes.set(path, entry);
}
jobDone();
});
}
}
} else {
files: for (const path of files) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue files;
}
}
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
fileTimestamps.set(path, cache);
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) {
fileTimestamps.set(path, "error");
} else {
fileTimestamps.set(path, entry);
}
jobDone();
});
}
}
}
}
if (directories) {
for (const path of directories) {
contextTimestamps.set(path, "error");
// TODO: getContextTimestamp is not implemented yet
if (options && options.hash) {
directories: for (const path of directories) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue directories;
}
}
const cache = this._contextHashes.get(path);
if (cache !== undefined) {
contextHashes.set(path, cache);
} else {
jobs++;
this.contextHashQueue.add(path, (err, entry) => {
if (err) {
contextHashes.set(path, "error");
} else {
contextHashes.set(path, entry);
}
jobDone();
});
}
}
} else {
directories: for (const path of directories) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue directories;
}
}
contextTimestamps.set(path, "error");
// TODO: getContextTimestamp is not implemented yet
}
}
}
if (missing) {
for (const path of missing) {
missing: for (const path of missing) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue missing;
}
}
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
missingTimestamps.set(path, cache);
@@ -168,9 +534,76 @@
}
}
}
for (const path of managedItems) {
const cache = this._managedItems.get(path);
if (cache !== undefined) {
managedItemInfo.set(path, cache);
} else {
jobs++;
this.managedItemQueue.add(path, (err, entry) => {
if (err) {
managedItemInfo.set(path, "error");
} else {
managedItemInfo.set(path, entry);
}
jobDone();
});
}
}
jobDone();
}
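For build dependencies the interesting mode is `{ hash: true }`: entries are recorded as content hashes rather than timestamps, so the snapshot survives reinstalling packages or moving between machines as long as contents match. Continuing the sketch above (names illustrative):

```javascript
info.createSnapshot(
	undefined,      // no startTime needed when comparing by content hash
	files,          // sets as produced by resolveBuildDependencies
	directories,
	missing,
	{ hash: true }, // record fileHashes/contextHashes instead of timestamps
	(err, snapshot) => {
		if (err) throw err;
		// later, e.g. on the next run:
		info.checkSnapshotValid(snapshot, (err2, valid) => {
			if (!err2) console.log("build dependencies unchanged?", valid);
		});
	}
);
```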
/**
* @param {Snapshot} snapshot1 a snapshot
* @param {Snapshot} snapshot2 a snapshot
* @returns {Snapshot} merged snapshot
*/
mergeSnapshots(snapshot1, snapshot2) {
/** @type {Snapshot} */
const snapshot = {};
if (snapshot1.startTime && snapshot2.startTime)
snapshot.startTime = Math.min(snapshot1.startTime, snapshot2.startTime);
else if (snapshot2.startTime) snapshot.startTime = snapshot2.startTime;
else if (snapshot1.startTime) snapshot.startTime = snapshot1.startTime;
if (snapshot1.fileTimestamps || snapshot2.fileTimestamps) {
snapshot.fileTimestamps = mergeMaps(
snapshot1.fileTimestamps,
snapshot2.fileTimestamps
);
}
if (snapshot1.fileHashes || snapshot2.fileHashes) {
snapshot.fileHashes = mergeMaps(
snapshot1.fileHashes,
snapshot2.fileHashes
);
}
if (snapshot1.contextTimestamps || snapshot2.contextTimestamps) {
snapshot.contextTimestamps = mergeMaps(
snapshot1.contextTimestamps,
snapshot2.contextTimestamps
);
}
if (snapshot1.contextHashes || snapshot2.contextHashes) {
snapshot.contextHashes = mergeMaps(
snapshot1.contextHashes,
snapshot2.contextHashes
);
}
if (snapshot1.missingTimestamps || snapshot2.missingTimestamps) {
snapshot.missingTimestamps = mergeMaps(
snapshot1.missingTimestamps,
snapshot2.missingTimestamps
);
}
if (snapshot1.managedItemInfo || snapshot2.managedItemInfo) {
snapshot.managedItemInfo = mergeMaps(
snapshot1.managedItemInfo,
snapshot2.managedItemInfo
);
}
return snapshot;
}
/**
* @param {Snapshot} snapshot the snapshot made
* @param {function(WebpackError=, boolean=): void} callback callback function
@@ -180,8 +613,11 @@ class FileSystemInfo {
const {
startTime,
fileTimestamps,
fileHashes,
contextTimestamps,
missingTimestamps
contextHashes,
missingTimestamps,
managedItemInfo
} = snapshot;
let jobs = 1;
const jobDone = () => {
@@ -195,6 +631,14 @@ class FileSystemInfo {
callback(null, false);
}
};
const checkHash = (current, snap) => {
if (snap === "error") {
// If there was an error while snapshotting (e.g. EBUSY)
// we can't compare further data and assume it's invalid
return false;
}
return current === snap;
};
/**
* @param {FileSystemInfoEntry} current current entry
* @param {FileSystemInfoEntry | "error"} snap entry from snapshot
@@ -242,51 +686,113 @@
}
return true;
};
for (const [path, ts] of fileTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkFile(cache, ts)) {
invalid();
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkFile(entry, ts)) {
if (fileTimestamps) {
for (const [path, ts] of fileTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkFile(cache, ts)) {
invalid();
} else {
jobDone();
}
});
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkFile(entry, ts)) {
invalid();
} else {
jobDone();
}
});
}
}
}
if (contextTimestamps.size > 0) {
if (fileHashes) {
for (const [path, hash] of fileHashes) {
const cache = this._fileHashes.get(path);
if (cache !== undefined) {
if (!checkHash(cache, hash)) {
invalid();
}
} else {
jobs++;
this.fileHashQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, hash)) {
invalid();
} else {
jobDone();
}
});
}
}
}
if (contextTimestamps && contextTimestamps.size > 0) {
// TODO: getContextTimestamp is not implemented yet
invalid();
}
for (const [path, ts] of missingTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkExistance(cache, ts)) {
invalid();
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkExistance(entry, ts)) {
if (contextHashes) {
for (const [path, hash] of contextHashes) {
const cache = this._contextHashes.get(path);
if (cache !== undefined) {
if (!checkHash(cache, hash)) {
invalid();
} else {
jobDone();
}
});
} else {
jobs++;
this.contextHashQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, hash)) {
invalid();
} else {
jobDone();
}
});
}
}
}
if (missingTimestamps) {
for (const [path, ts] of missingTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkExistance(cache, ts)) {
invalid();
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkExistance(entry, ts)) {
invalid();
} else {
jobDone();
}
});
}
}
}
if (managedItemInfo) {
for (const [path, info] of managedItemInfo) {
const cache = this._managedItems.get(path);
if (cache !== undefined) {
if (!checkHash(cache, info)) {
invalid();
}
} else {
jobs++;
this.managedItemQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, info)) {
invalid();
} else {
jobDone();
}
});
}
}
}
jobDone();
}
// TODO getFileHash(path, callback)
_readFileTimestamp(path, callback) {
this.fs.stat(path, (err, stat) => {
if (err) {
@@ -312,12 +818,99 @@
});
}
_readFileHash(path, callback) {
this.fs.readFile(path, (err, content) => {
if (err) {
if (err.code === "ENOENT") {
this._fileHashes.set(path, null);
return callback(null, null);
}
return callback(err);
}
const hash = createHash("md4");
hash.update(content);
const digest = /** @type {string} */ (hash.digest("hex"));
this._fileHashes.set(path, digest);
callback(null, digest);
});
}
_readContextTimestamp(path, callback) {
// TODO read whole folder
this._contextTimestamps.set(path, null);
callback(null, null);
}
_readContextHash(path, callback) {
this.fs.readdir(path, (err, files) => {
if (err) {
if (err.code === "ENOENT") {
this._contextHashes.set(path, null);
return callback(null, null);
}
return callback(err);
}
files = files
.map(file => file.normalize("NFC"))
.filter(file => !/^\./.test(file))
.sort();
asyncLib.map(
files,
(file, callback) => {
const child = join(this.fs, path, file);
this.fs.stat(child, (err, stat) => {
if (err) return callback(err);
if (stat.isFile()) {
return this.getFileHash(child, callback);
}
if (stat.isDirectory()) {
this.contextHashQueue.increaseParallelism();
this.getContextHash(child, (err, hash) => {
this.contextHashQueue.decreaseParallelism();
callback(err, hash || "");
});
return;
}
callback(null, "");
});
},
(err, fileHashes) => {
const hash = createHash("md4");
for (const file of files) hash.update(file);
for (const h of fileHashes) hash.update(h);
const digest = /** @type {string} */ (hash.digest("hex"));
this._contextHashes.set(path, digest);
callback(null, digest);
}
);
});
}
_getManagedItemInfo(path, callback) {
const packageJsonPath = join(this.fs, path, "package.json");
this.fs.readFile(packageJsonPath, (err, content) => {
if (err) return callback(err);
let data;
try {
data = JSON.parse(content.toString("utf-8"));
} catch (e) {
return callback(e);
}
const info = `${data.name || ""}@${data.version || ""}`;
callback(null, info);
});
}
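A managed package therefore collapses to a single `name@version` token for snapshot comparison instead of being hashed file by file. Illustratively (package and version made up):

```javascript
// path = "/proj/node_modules/react",
// package.json = { "name": "react", "version": "16.8.6" }
// _getManagedItemInfo yields:
// callback(null, "react@16.8.6");
```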
getDeprecatedFileTimestamps() {
const map = new Map();
for (const [path, info] of this._fileTimestamps) {


@@ -9,6 +9,14 @@ const WebpackError = require("./WebpackError");
/** @typedef {import("./Module")} Module */
/**
* @template T
* @callback Callback
* @param {Error=} err
* @param {T=} stats
* @returns {void}
*/
class HookWebpackError extends WebpackError {
/**
* Creates an instance of HookWebpackError.
@@ -46,7 +54,7 @@ module.exports.makeWebpackError = makeWebpackError;
* @template T
* @param {function(WebpackError=, T=): void} callback webpack error callback
* @param {string} hook name of hook
* @returns {function(Error=, T=): void} generic callback
* @returns {Callback<T>} generic callback
*/
const makeWebpackErrorCallback = (callback, hook) => {
return (err, result) => {


@@ -445,6 +445,9 @@ class NormalModule extends Module {
null
);
}
for (const loader of this.loaders) {
compilation.buildDependencies.add(loader.loader);
}
this.buildInfo.fileDependencies = new Set(result.fileDependencies);
this.buildInfo.contextDependencies = new Set(
result.contextDependencies


@@ -540,6 +540,8 @@ class WebpackOptionsApply extends OptionsApply {
if (options.cache && typeof options.cache === "object") {
const cacheOptions = options.cache;
const AddManagedPathsPlugin = require("./cache/AddManagedPathsPlugin");
new AddManagedPathsPlugin(cacheOptions.managedPaths).apply(compiler);
switch (cacheOptions.type) {
case "memory": {
const MemoryCachePlugin = require("./cache/MemoryCachePlugin");
@@ -547,6 +549,11 @@ class WebpackOptionsApply extends OptionsApply {
break;
}
case "filesystem": {
const AddBuildDependenciesPlugin = require("./cache/AddBuildDependenciesPlugin");
for (const key in cacheOptions.buildDependencies) {
const list = cacheOptions.buildDependencies[key];
new AddBuildDependenciesPlugin(list).apply(compiler);
}
const MemoryCachePlugin = require("./cache/MemoryCachePlugin");
new MemoryCachePlugin().apply(compiler);
switch (cacheOptions.store) {
@@ -606,11 +613,13 @@ class WebpackOptionsApply extends OptionsApply {
new IdleFileCachePlugin(
new PackFileCacheStrategy({
fs: compiler.intermediateFileSystem,
context: options.context,
cacheLocation: cacheOptions.cacheLocation,
version: cacheOptions.version,
logger: compiler.getInfrastructureLogger(
"webpack.cache.PackFileCacheStrategy"
)
),
managedPaths: cacheOptions.managedPaths
}),
cacheOptions.idleTimeout,
cacheOptions.idleTimeoutForInitialStore


@@ -89,9 +89,22 @@ class WebpackOptionsDefaulter extends OptionsDefaulter {
if (value.idleTimeoutForInitialStore === undefined) {
value.idleTimeoutForInitialStore = 0;
}
value.buildDependencies = Object.assign({}, value.buildDependencies);
if (value.buildDependencies.defaultWebpack === undefined) {
value.buildDependencies.defaultWebpack = [__dirname + path.sep];
}
}
return value;
});
this.set("cache.managedPaths", "make", () => {
const match = /^(.+?[\\/]node_modules[\\/])/.exec(
// eslint-disable-next-line node/no-extraneous-require
require.resolve("watchpack")
);
if (match) {
return [match[1]];
}
});
this.set("context", process.cwd());
this.set("target", "web");

lib/cache/AddBuildDependenciesPlugin.js (new file)

@@ -0,0 +1,32 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class AddBuildDependenciesPlugin {
/**
* @param {Iterable<string>} buildDependencies list of build dependencies
*/
constructor(buildDependencies) {
this.buildDependencies = new Set(buildDependencies);
}
/**
* @param {Compiler} compiler Webpack compiler
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(
"AddBuildDependenciesPlugin",
compilation => {
compilation.buildDependencies.addAll(this.buildDependencies);
}
);
}
}
module.exports = AddBuildDependenciesPlugin;

lib/cache/AddManagedPathsPlugin.js (new file)

@@ -0,0 +1,29 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class AddManagedPathsPlugin {
/**
* @param {Iterable<string>} managedPaths list of managed paths
*/
constructor(managedPaths) {
this.managedPaths = new Set(managedPaths);
}
/**
* @param {Compiler} compiler Webpack compiler
* @returns {void}
*/
apply(compiler) {
for (const managedPath of this.managedPaths) {
compiler.managedPaths.add(managedPath);
}
}
}
module.exports = AddManagedPathsPlugin;


@@ -9,6 +9,8 @@ const Cache = require("../Cache");
/** @typedef {import("../Compiler")} Compiler */
const BUILD_DEPENDENCIES_KEY = Symbol();
class IdleFileCachePlugin {
/**
* @param {TODO} strategy cache strategy
@@ -35,7 +37,7 @@ class IdleFileCachePlugin {
const resolvedPromise = Promise.resolve();
/** @type {Map<string, () => Promise>} */
/** @type {Map<string | typeof BUILD_DEPENDENCIES_KEY, () => Promise>} */
const pendingIdleTasks = new Map();
compiler.cache.hooks.store.tap(
@@ -67,6 +69,15 @@
}
);
compiler.cache.hooks.storeBuildDependencies.tap(
{ name: "IdleFile", stage: Cache.STAGE_DISK },
dependencies => {
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () =>
strategy.storeBuildDependencies(dependencies)
);
}
);
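Keying the pending task with a unique `Symbol` lets repeated `storeBuildDependencies` calls within one idle period collapse into a single task while never colliding with the string identifiers used by the `store` hook. The idea in isolation:

```javascript
const BUILD_DEPENDENCIES_KEY = Symbol();
const pendingIdleTasks = new Map();

pendingIdleTasks.set("module-identifier", () => Promise.resolve()); // from the store hook
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () => Promise.resolve());
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () => Promise.resolve()); // replaced, not duplicated
console.log(pendingIdleTasks.size); // 2
```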
compiler.cache.hooks.shutdown.tapPromise(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
() => {


@@ -5,6 +5,8 @@
"use strict";
const FileSystemInfo = require("../FileSystemInfo");
const LazySet = require("../util/LazySet");
const makeSerializable = require("../util/makeSerializable");
const {
createFileSerializer,
@@ -15,6 +17,32 @@ const {
const MAX_INLINE_SIZE = 20000;
class DataWithBuildSnapshot {
constructor(data, buildSnapshot, buildDependencies) {
this.data = data;
this.buildSnapshot = buildSnapshot;
this.buildDependencies = buildDependencies;
}
serialize({ write }) {
write(this.buildSnapshot);
write(this.buildDependencies);
write(this.data);
}
deserialize({ read }) {
this.buildSnapshot = read();
this.buildDependencies = read();
this.data = read();
}
}
makeSerializable(
DataWithBuildSnapshot,
"webpack/lib/cache/PackFileCacheStrategy",
"DataWithBuildSnapshot"
);
class Pack {
constructor(version, logger) {
this.version = version;
@@ -47,7 +75,16 @@ class Pack {
set(identifier, etag, data) {
if (this.unserializable.has(identifier)) return;
this.used.add(identifier);
this.invalid = true;
if (!this.invalid) {
if (
this.content.get(identifier) === data &&
this.etags.get(identifier) === etag
) {
return;
}
this.invalid = true;
this.logger.debug(`Pack became invalid because of ${identifier}`);
}
this.etags.set(identifier, etag);
return this.content.set(identifier, data);
}
@@ -191,15 +228,52 @@ makeSerializable(
);
class PackFileCacheStrategy {
constructor({ fs, cacheLocation, version, logger }) {
constructor({ fs, context, cacheLocation, version, logger, managedPaths }) {
this.fileSerializer = createFileSerializer(fs);
this.fileSystemInfo = new FileSystemInfo(fs, { managedPaths });
this.context = context;
this.cacheLocation = cacheLocation;
this.logger = logger;
this.buildSnapshot = undefined;
this.oldBuildDependencies = new Set();
this.buildDependencies = new LazySet();
let buildSnapshot;
let buildDependencies;
logger.time("restore pack");
this.packPromise = this.fileSerializer
.deserialize({ filename: `${cacheLocation}.pack`, logger })
.then(cacheEntry => {
logger.timeEnd("restore pack");
if (cacheEntry instanceof DataWithBuildSnapshot) {
logger.timeEnd("restore pack");
return new Promise((resolve, reject) => {
logger.time("check build dependencies");
this.fileSystemInfo.checkSnapshotValid(
cacheEntry.buildSnapshot,
(err, valid) => {
if (err) return reject(err);
logger.timeEnd("check build dependencies");
if (!valid) {
logger.log(
`Restored pack from ${cacheLocation}.pack, but build dependencies have changed.`
);
return resolve(undefined);
}
buildSnapshot = cacheEntry.buildSnapshot;
buildDependencies = cacheEntry.buildDependencies;
logger.time("restore pack content");
return resolve(
cacheEntry.data().then(d => {
logger.timeEnd("restore pack content");
return d;
})
);
}
);
});
}
return cacheEntry;
})
.then(cacheEntry => {
if (cacheEntry) {
if (!(cacheEntry instanceof Pack)) {
logger.warn(
@@ -213,6 +287,8 @@ class PackFileCacheStrategy {
);
return new Pack(version, logger);
}
this.buildSnapshot = buildSnapshot;
this.oldBuildDependencies = buildDependencies;
return cacheEntry;
}
return new Pack(version, logger);
@@ -248,30 +324,98 @@ class PackFileCacheStrategy {
});
}
storeBuildDependencies(dependencies) {
this.buildDependencies.addAll(dependencies);
}
afterAllStored() {
return this.packPromise.then(pack => {
if (!pack.invalid) return;
this.logger.log(`Storing pack...`);
this.logger.time(`store pack`);
pack.collectGarbage(1000 * 60 * 60 * 24 * 2);
// You might think this breaks all access to the existing pack
// which are still referenced, but serializing the pack memorizes
// all data in the pack and makes it no longer need the backing file
// So it's safe to replace the pack file
return this.fileSerializer
.serialize(pack, {
filename: `${this.cacheLocation}.pack`,
logger: this.logger
})
.then(() => {
this.logger.timeEnd(`store pack`);
this.logger.log(`Stored pack`);
})
.catch(err => {
this.logger.timeEnd(`store pack`);
this.logger.warn(`Caching failed for pack: ${err}`);
this.logger.debug(err.stack);
let promise;
const newBuildDependencies = new Set();
for (const dep of this.buildDependencies) {
if (!this.oldBuildDependencies.has(dep)) {
newBuildDependencies.add(dep);
this.oldBuildDependencies.add(dep);
}
}
this.buildDependencies.clear();
if (newBuildDependencies.size > 0) {
this.logger.debug(
`Capturing build dependencies... (${Array.from(
newBuildDependencies
).join(", ")})`
);
promise = new Promise((resolve, reject) => {
this.logger.time("resolve build dependencies");
this.fileSystemInfo.resolveBuildDependencies(
this.context,
newBuildDependencies,
(err, result) => {
if (err) return reject(err);
this.logger.timeEnd("resolve build dependencies");
this.logger.time("snapshot build dependencies");
const { files, directories, missing } = result;
this.fileSystemInfo.createSnapshot(
undefined,
files,
directories,
missing,
{ hash: true },
(err, snapshot) => {
if (err) return reject(err);
this.logger.timeEnd("snapshot build dependencies");
this.logger.debug("Captured build dependencies");
if (this.buildSnapshot) {
this.buildSnapshot = this.fileSystemInfo.mergeSnapshots(
this.buildSnapshot,
snapshot
);
} else {
this.buildSnapshot = snapshot;
}
resolve();
}
);
}
);
});
} else {
promise = Promise.resolve();
}
return promise.then(() => {
this.logger.log(`Storing pack...`);
this.logger.time(`store pack`);
pack.collectGarbage(1000 * 60 * 60 * 24 * 2);
const content = this.buildSnapshot
? new DataWithBuildSnapshot(
() => pack,
this.buildSnapshot,
this.oldBuildDependencies
)
: pack;
// You might think this breaks all access to the existing pack,
// which is still referenced, but serializing the pack keeps all of
// its data in memory, so it no longer needs the backing file.
// That makes it safe to replace the pack file.
return this.fileSerializer
.serialize(content, {
filename: `${this.cacheLocation}.pack`,
logger: this.logger
})
.then(() => {
this.logger.timeEnd(`store pack`);
this.logger.log(`Stored pack`);
})
.catch(err => {
this.logger.timeEnd(`store pack`);
this.logger.warn(`Caching failed for pack: ${err}`);
this.logger.debug(err.stack);
});
});
});
}
}


@@ -85,6 +85,8 @@ class SeparateFilesCacheStrategy {
});
}
storeBuildDependencies() {}
afterAllStored() {}
}


@@ -280,6 +280,7 @@ class BinaryMiddleware extends SerializerMiddleware {
deserialize(data, context) {
let currentDataItem = 0;
let currentBuffer = data[0];
let currentIsBuffer = Buffer.isBuffer(currentBuffer);
let currentPosition = 0;
const checkOverflow = () => {
if (currentPosition >= currentBuffer.length) {
@@ -287,26 +288,37 @@ class BinaryMiddleware extends SerializerMiddleware {
currentDataItem++;
currentBuffer =
currentDataItem < data.length ? data[currentDataItem] : null;
currentIsBuffer = Buffer.isBuffer(currentBuffer);
}
};
const isInCurrentBuffer = n => {
return currentIsBuffer && n + currentPosition <= currentBuffer.length;
};
const read = n => {
if (currentBuffer === null) throw new Error("Unexpected end of stream");
if (!Buffer.isBuffer(currentBuffer))
if (!currentIsBuffer) {
if (currentBuffer === null) throw new Error("Unexpected end of stream");
throw new Error("Unexpected lazy element in stream");
}
const rem = currentBuffer.length - currentPosition;
if (rem < n) {
return Buffer.concat([read(rem), read(n - rem)]);
}
const res = currentBuffer.slice(currentPosition, currentPosition + n);
const res = /** @type {Buffer} */ (currentBuffer).slice(
currentPosition,
currentPosition + n
);
currentPosition += n;
checkOverflow();
return res;
};
const readU8 = () => {
if (currentBuffer === null) throw new Error("Unexpected end of stream");
if (!Buffer.isBuffer(currentBuffer))
if (!currentIsBuffer) {
if (currentBuffer === null) throw new Error("Unexpected end of stream");
throw new Error("Unexpected lazy element in stream");
const byte = currentBuffer.readUInt8(currentPosition);
}
const byte = /** @type {Buffer} */ (currentBuffer).readUInt8(
currentPosition
);
currentPosition++;
checkOverflow();
return byte;
@@ -323,6 +335,7 @@ class BinaryMiddleware extends SerializerMiddleware {
currentDataItem++;
currentBuffer =
currentDataItem < data.length ? data[currentDataItem] : null;
currentIsBuffer = Buffer.isBuffer(currentBuffer);
continue;
}
const header = readU8();
@@ -355,21 +368,48 @@ class BinaryMiddleware extends SerializerMiddleware {
result.push(buf.toString());
} else if ((header & NUMBERS_HEADER_MASK) === F64_HEADER) {
const len = header & 0x1f;
const buf = read(8 * len + 8);
for (let i = 0; i <= len; i++) {
result.push(buf.readDoubleLE(i * 8));
const need = 8 * len + 8;
if (isInCurrentBuffer(need)) {
for (let i = 0; i <= len; i++) {
result.push(currentBuffer.readDoubleLE(currentPosition));
currentPosition += 8;
}
checkOverflow();
} else {
const buf = read(need);
for (let i = 0; i <= len; i++) {
result.push(buf.readDoubleLE(i * 8));
}
}
} else if ((header & NUMBERS_HEADER_MASK) === I32_HEADER) {
const len = header & 0x1f;
const buf = read(4 * len + 4);
for (let i = 0; i <= len; i++) {
result.push(buf.readInt32LE(i * 4));
const need = 4 * len + 4;
if (isInCurrentBuffer(need)) {
for (let i = 0; i <= len; i++) {
result.push(currentBuffer.readInt32LE(currentPosition));
currentPosition += 4;
}
checkOverflow();
} else {
const buf = read(need);
for (let i = 0; i <= len; i++) {
result.push(buf.readInt32LE(i * 4));
}
}
} else if ((header & NUMBERS_HEADER_MASK) === I8_HEADER) {
const len = header & 0x1f;
const buf = read(len + 1);
for (let i = 0; i <= len; i++) {
result.push(buf.readInt8(i));
const need = len + 1;
if (isInCurrentBuffer(need)) {
for (let i = 0; i <= len; i++) {
result.push(currentBuffer.readInt8(currentPosition));
currentPosition++;
}
checkOverflow();
} else {
const buf = read(need);
for (let i = 0; i <= len; i++) {
result.push(buf.readInt8(i));
}
}
} else if ((header & NULLS_HEADER_MASK) === NULLS_HEADER) {
const len = header & 0x0f;
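Each of the three number branches now follows the same pattern: when `isInCurrentBuffer` confirms the whole block lies within the current buffer, values are read in place and `currentPosition` advances, skipping the slow-path `read()` that may need a `Buffer.concat` across buffer boundaries. The fast path in isolation — a standalone sketch, not webpack API:

```javascript
// Read `count` little-endian doubles directly from one Buffer, no copying
const readDoublesInPlace = (buffer, position, count) => {
	const result = [];
	for (let i = 0; i < count; i++) {
		result.push(buffer.readDoubleLE(position));
		position += 8;
	}
	return { result, position };
};
```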


@@ -31,7 +31,9 @@ class Section {
parts.push(r.then(items => new Section(items).resolve()));
hasPromise = true;
} else if (r) {
parts.push(new Section(r).resolve());
const part = new Section(r).resolve();
if (part instanceof Promise) hasPromise = true;
parts.push(part);
} else {
return null;
}


@@ -385,9 +385,10 @@ class ObjectMiddleware extends SerializerMiddleware {
throw new Error("Version missmatch, serializer changed");
let currentPos = 0;
const referenceable = new Map();
const referenceable = [];
const addReferenceable = item => {
referenceable.set(currentPos++, item);
referenceable.push(item);
currentPos++;
};
let currentPosTypeLookup = 0;
const objectTypeLookup = new Map();
@@ -414,7 +415,7 @@ class ObjectMiddleware extends SerializerMiddleware {
);
} else if (typeof nextItem === "number" && nextItem < 0) {
// relative reference
return referenceable.get(currentPos + nextItem);
return referenceable[currentPos + nextItem];
} else {
const request = nextItem;
let serializer;


@@ -79,6 +79,8 @@ class LazySet {
} else {
this._toMerge.add(iterable);
this._needMerge = true;
// Avoid a memory leak
if (this._toMerge.size > 100000) this._merge();
}
return this;
}


@@ -9,6 +9,8 @@ const path = require("path");
/** @typedef {function(NodeJS.ErrnoException=): void} Callback */
/** @typedef {function(NodeJS.ErrnoException=, Buffer=): void} BufferCallback */
/** @typedef {function(NodeJS.ErrnoException=, string[]=): void} StringArrayCallback */
/** @typedef {function(NodeJS.ErrnoException=, string=): void} StringCallback */
/** @typedef {function(NodeJS.ErrnoException=, import("fs").Stats=): void} StatsCallback */
/**
@@ -31,7 +33,9 @@
/**
* @typedef {Object} InputFileSystem
* @property {function(string, BufferCallback): void} readFile
* @property {function(string, StringArrayCallback): void} readdir
* @property {function(string, StatsCallback): void} stat
* @property {(function(string, StringCallback): void)=} realpath
* @property {(function(string=): void)=} purge
* @property {(function(string, string): string)=} join
* @property {(function(string, string): string)=} relative


@@ -4,52 +4,19 @@
"use strict";
const createHash = require("./createHash");
const { register } = require("./serialization");
const getPrototypeChain = C => {
const chain = [];
let current = C.prototype;
while (current !== Object.prototype) {
chain.push(current);
current = Object.getPrototypeOf(current);
}
return chain;
};
class ClassSerializer {
constructor(Constructor) {
this.Constructor = Constructor;
this.hash = null;
}
_createHash() {
const hash = createHash("md4");
const prototypeChain = getPrototypeChain(this.Constructor);
if (typeof this.Constructor.deserialize === "function")
hash.update(this.Constructor.deserialize.toString());
for (const p of prototypeChain) {
if (typeof p.serialize === "function") {
hash.update(p.serialize.toString());
}
if (typeof p.deserialize === "function") {
hash.update(p.deserialize.toString());
}
}
this.hash = hash.digest("base64");
}
serialize(obj, context) {
if (!this.hash) this._createHash();
context.write(this.hash);
obj.serialize(context);
}
deserialize(context) {
if (!this.hash) this._createHash();
const hash = context.read();
if (this.hash !== hash)
throw new Error(`Version mismatch for class ${this.Constructor.name}`);
if (typeof this.Constructor.deserialize === "function") {
return this.Constructor.deserialize(context);
}
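With the hand-rolled per-class hash versioning removed (the hunk header shows the file shrinking from 52 to 19 lines), `ClassSerializer` reduces to delegating to the class's own `serialize`/`deserialize`. A hedged sketch of the registration pattern it serves, mirroring the `DataWithBuildSnapshot` registration earlier in this commit — the class and request string are illustrative:

```javascript
const makeSerializable = require("webpack/lib/util/makeSerializable"); // path assumed

class Point {
	constructor(x, y) {
		this.x = x;
		this.y = y;
	}
	serialize({ write }) {
		write(this.x);
		write(this.y);
	}
	deserialize({ read }) {
		this.x = read();
		this.y = read();
	}
}

makeSerializable(Point, "some/request/Point", "Point"); // registers it with the object middleware
```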


@@ -194,6 +194,19 @@
"type": "object",
"additionalProperties": false,
"properties": {
"buildDependencies": {
"description": "Dependencies the build depends on (in multiple categories, default categories: 'defaultWebpack')",
"type": "object",
"additionalProperties": {
"description": "List of dependencies the build depends on",
"type": "array",
"items": {
"description": "Request to a dependency (resolved as directory relative to the context directory)",
"type": "string",
"minLength": 1
}
}
},
"cacheDirectory": {
"description": "Base directory for the cache (defaults to node_modules/.cache/webpack).",
"type": "string",
@@ -218,6 +231,16 @@
"type": "number",
"minimum": 0
},
"managedPaths": {
"description": "List of paths that are managed by a package manager and can be trusted to not being modified otherwise",
"type": "array",
"items": {
"description": "A path to a managed directory (usually a node_modules directory)",
"type": "string",
"absolutePath": true,
"minLength": 1
}
},
"name": {
"description": "Name for the cache. Different names will lead to different coexisting caches.",
"type": "string"
@@ -299,6 +322,16 @@
"type": "object",
"additionalProperties": false,
"properties": {
"managedPaths": {
"description": "List of paths that are managed by a package manager and can be trusted to not being modified otherwise",
"type": "array",
"items": {
"description": "A path to a managed directory (usually a node_modules directory)",
"type": "string",
"absolutePath": true,
"minLength": 1
}
},
"type": {
"description": "In memory caching",
"enum": ["memory"]
@@ -785,7 +818,7 @@
"minLength": 1
},
"cacheGroups": {
"description": "Assign modules to a cache group (modules from different cache groups are tried to keep in separate chunks)",
"description": "Assign modules to a cache group (modules from different cache groups are tried to keep in separate chunks, default categories: 'default', 'defaultVendors')",
"type": "object",
"additionalProperties": {
"description": "Configuration for a cache group",


@@ -0,0 +1,71 @@
"use strict";
const path = require("path");
const child_process = require("child_process");
const fs = require("fs");
const rimraf = require("rimraf");
const cacheDirectory = path.resolve(__dirname, "js/buildDepsCache");
const outputDirectory = path.resolve(__dirname, "js/buildDeps");
const inputDirectory = path.resolve(__dirname, "js/buildDepsInput");
const exec = n => {
return new Promise((resolve, reject) => {
const p = child_process.fork(
path.resolve(__dirname, "fixtures/buildDependencies/run.js"),
[n],
{ stdio: ["ignore", "pipe", "inherit", "ipc"] }
);
const chunks = [];
p.stdout.on("data", chunk => chunks.push(chunk));
p.once("exit", code => {
const stdout = Buffer.concat(chunks).toString("utf-8");
if (code === 0) {
resolve(stdout);
} else {
reject(new Error(`Code ${code}: ${stdout}`));
}
});
p.once("error", err => reject(err));
});
};
describe("BuildDependencies", () => {
beforeEach(done => {
rimraf(cacheDirectory, done);
});
beforeEach(done => {
rimraf(outputDirectory, done);
});
beforeEach(done => {
fs.mkdir(inputDirectory, { recursive: true }, done);
});
it("should capture loader dependencies", async () => {
fs.writeFileSync(
path.resolve(inputDirectory, "loader-dependency.js"),
"module.exports = 1;"
);
await exec("1");
fs.writeFileSync(
path.resolve(inputDirectory, "loader-dependency.js"),
"module.exports = Date.now();"
);
const now = Date.now();
await exec("2");
await exec("3");
// eslint-disable-next-line node/no-missing-require
const first = require("./js/buildDeps/1/main.js");
// eslint-disable-next-line node/no-missing-require
const second = require("./js/buildDeps/2/main.js");
// eslint-disable-next-line node/no-missing-require
const third = require("./js/buildDeps/3/main.js");
expect(typeof first).toBe("number");
expect(typeof second).toBe("number");
expect(typeof third).toBe("number");
expect(first).toBe(1);
// Should be invalidated
expect(second).toBeGreaterThan(now);
// Should stay cached
expect(third).toBe(second);
}, 30000);
});


@@ -0,0 +1 @@
module.exports = require("./loader!");


@@ -0,0 +1 @@
module.exports = Date.now();


@@ -0,0 +1,6 @@
// eslint-disable-next-line node/no-missing-require
const value = require("../../js/buildDepsInput/loader-dependency");
module.exports = () => {
return `module.exports = ${value};`;
};

test/fixtures/buildDependencies/run.js (new file)

@@ -0,0 +1,33 @@
const path = require("path");
const webpack = require("../../..");
process.exitCode = 1;
webpack(
{
mode: "development",
context: __dirname,
entry: "./index",
output: {
path: path.resolve(__dirname, "../../js/buildDeps/" + process.argv[2]),
libraryTarget: "commonjs2"
},
cache: {
type: "filesystem",
cacheDirectory: path.resolve(__dirname, "../../js/buildDepsCache"),
buildDependencies: {
config: [__filename]
}
}
},
(err, stats) => {
if (err) {
return console.log(err);
}
if (stats.hasErrors()) {
return console.log(stats.toString({ all: false, errors: true }));
}
process.exitCode = 0;
console.log("OK");
}
);


@@ -1,11 +1,12 @@
const path = require("path");
const fs = require("fs");
const BinaryMiddleware = require("../lib/serialization/BinaryMiddleware");
const FileMiddleware = require("../lib/serialization/FileMiddleware");
const Serializer = require("../lib/serialization/Serializer");
const serializer = new Serializer([
new BinaryMiddleware(),
new FileMiddleware()
new FileMiddleware(fs)
]);
const ESCAPE = null;