store build dependencies snapshot in pack

add snapshot shortcut for package manager managed paths
add cache.managedPaths
This commit is contained in:
Tobias Koppers 2019-08-12 22:59:09 +02:00
parent ada11a286d
commit e5cecdad23
17 changed files with 825 additions and 74 deletions

11
declarations.d.ts vendored
View File

@ -8,6 +8,12 @@ declare namespace NodeJS {
}
declare module "neo-async" {
interface QueueObject<T, E> {
push(item: T): void;
drain: () => void;
error: (err: E) => void;
}
export interface Dictionary<T> {
[key: string]: T;
}
@ -104,6 +110,11 @@ declare module "neo-async" {
callback?: AsyncResultObjectCallback<T, E>
): void;
export function queue<T, E>(
worker: AsyncFunction<T, E>,
concurrency?: number
): QueueObject<T, E>;
export const forEach: typeof each;
export const forEachLimit: typeof eachLimit;
}

View File

@ -441,6 +441,10 @@ export interface WebpackOptions {
* via the `definition` "MemoryCacheOptions".
*/
export interface MemoryCacheOptions {
/**
* List of paths that are managed by a package manager and can be trusted to not be modified otherwise
*/
managedPaths?: string[];
/**
* In memory caching
*/
@ -480,6 +484,10 @@ export interface FileCacheOptions {
* Time in ms of idle period after which the initial cache storing should happen (only for store: 'pack' or 'idle')
*/
idleTimeoutForInitialStore?: number;
/**
* List of paths that are managed by a package manager and can be trusted to not be modified otherwise
*/
managedPaths?: string[];
/**
* Name for the cache. Different names will lead to different coexisting caches.
*/

View File

@ -3,6 +3,7 @@
```javascript
console.log(process.env.NODE_ENV);
import "./example.css";
import "react";
import "react-dom";
import "acorn";
@ -17,13 +18,31 @@ const path = require("path");
module.exports = (env = "development") => ({
mode: env,
infrastructureLogging: {
// Optional: print more verbose logging about caching
level: "verbose"
},
cache: {
type: "filesystem",
// changing the cacheDirectory is optional,
// by default it will be in `node_modules/.cache`
cacheDirectory: path.resolve(__dirname, ".cache")
cacheDirectory: path.resolve(__dirname, ".cache"),
// Add additional dependencies to the build
buildDependencies: {
// recommended to invalidate cache on config changes
// This also makes all dependencies of this file build dependencies
config: [__filename]
// By default webpack and loaders are build dependencies
}
},
module: {
rules: [
{
test: /\.css$/,
use: ["style-loader", "css-loader"]
}
]
}
});
```
@ -33,10 +52,10 @@ module.exports = (env = "development") => ({
```
Hash: 0a1b2c3d4e5f6a7b8c9d
Version: webpack 5.0.0-alpha.19
Asset Size Chunks Chunk Names
output.js 1.78 MiB {0} [emitted] main
Asset Size Chunks Chunk Names
output.js 1.8 MiB {0} [emitted] main
Entrypoint main = output.js
chunk {0} output.js (main) 1.54 MiB (javascript) 1.07 KiB (runtime) [entry]
chunk {0} output.js (main) 1.55 MiB (javascript) 1.07 KiB (runtime) [entry]
> ./example.js main
526 chunk modules
530 chunk modules
```

View File

@ -0,0 +1,3 @@
body {
background: red;
}

View File

@ -1,5 +1,6 @@
console.log(process.env.NODE_ENV);
import "./example.css";
import "react";
import "react-dom";
import "acorn";

View File

@ -2,12 +2,30 @@ const path = require("path");
module.exports = (env = "development") => ({
mode: env,
infrastructureLogging: {
// Optional: print more verbose logging about caching
level: "verbose"
},
cache: {
type: "filesystem",
// changing the cacheDirectory is optional,
// by default it will be in `node_modules/.cache`
cacheDirectory: path.resolve(__dirname, ".cache")
cacheDirectory: path.resolve(__dirname, ".cache"),
// Add additional dependencies to the build
buildDependencies: {
// recommended to invalidate cache on config changes
// This also makes all dependencies of this file build dependencies
config: [__filename]
// By default webpack and loaders are build dependencies
}
},
module: {
rules: [
{
test: /\.css$/,
use: ["style-loader", "css-loader"]
}
]
}
});

View File

@ -414,7 +414,9 @@ class Compilation {
this.compiler = compiler;
this.resolverFactory = compiler.resolverFactory;
this.inputFileSystem = compiler.inputFileSystem;
this.fileSystemInfo = new FileSystemInfo(this.inputFileSystem);
this.fileSystemInfo = new FileSystemInfo(this.inputFileSystem, {
managedPaths: compiler.managedPaths
});
if (compiler.fileTimestamps) {
this.fileSystemInfo.addFileTimestamps(compiler.fileTimestamps);
}

View File

@ -176,6 +176,8 @@ class Compiler {
this.recordsOutputPath = null;
this.records = {};
/** @type {Set<string>} */
this.managedPaths = new Set();
/** @type {Set<string>} */
this.removedFiles = new Set();
/** @type {Map<string, FileSystemInfoEntry | null>} */
this.fileTimestamps = new Map();

View File

@ -5,11 +5,19 @@
"use strict";
const resolve = require("enhanced-resolve");
const asyncLib = require("neo-async");
const AsyncQueue = require("./util/AsyncQueue");
const createHash = require("./util/createHash");
const { join, dirname } = require("./util/fs");
/** @typedef {import("./WebpackError")} WebpackError */
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */
const resolveContext = resolve.create({
resolveToContext: true
});
let FS_ACCURACY = 2000;
/**
@ -20,10 +28,13 @@ let FS_ACCURACY = 2000;
/**
* @typedef {Object} Snapshot
* @property {number} startTime
* @property {Map<string, FileSystemInfoEntry | "error">} fileTimestamps
* @property {Map<string, FileSystemInfoEntry | "error">} contextTimestamps
* @property {Map<string, FileSystemInfoEntry | "error">} missingTimestamps
* @property {number=} startTime
* @property {Map<string, FileSystemInfoEntry | "error">=} fileTimestamps
* @property {Map<string, string | "error">=} fileHashes
* @property {Map<string, FileSystemInfoEntry | "error">=} contextTimestamps
* @property {Map<string, string | "error">=} contextHashes
* @property {Map<string, FileSystemInfoEntry | "error">=} missingTimestamps
* @property {Map<string, string | "error">=} managedItemInfo
*/
/* istanbul ignore next */
@ -34,26 +45,81 @@ const applyMtime = mtime => {
else if (FS_ACCURACY > 1000 && mtime % 2000 !== 0) FS_ACCURACY = 1000;
};
/**
 * Merges two maps; entries of `b` win on key conflicts.
 * Either argument may be undefined/empty, in which case the other map is
 * returned as-is (no copy). `mergeSnapshots` calls this with possibly
 * undefined maps (it only requires ONE of the two snapshots to carry a
 * given map), so both sides must be guarded — the unguarded `b.size`
 * access would otherwise throw a TypeError.
 * @param {Map<any, any>=} a base map
 * @param {Map<any, any>=} b overriding map
 * @returns {Map<any, any>} merged map (a fresh map only when both have entries)
 */
const mergeMaps = (a, b) => {
	if (!b || b.size === 0) return a;
	if (!a || a.size === 0) return b;
	const map = new Map(a);
	for (const [key, value] of b) {
		map.set(key, value);
	}
	return map;
};
/**
 * Computes the root directory of the package containing `path`, e.g.
 * `<managedPath>lodash` or `<managedPath>@scope/pkg`.
 * `managedPath` always ends with a separator (see `managedPathsWithSlash`
 * in the constructor), so `remaining` starts directly with the package
 * name. The package root therefore ends at the FIRST separator — not the
 * second: `slashes` must start at 1. (With the previous value of 2 an
 * unscoped package path like `node_modules/pkg/lib/a.js` produced
 * `node_modules/pkg/lib`, one segment too deep, and the later
 * `package.json` lookup in `_getManagedItemInfo` failed.)
 * Scoped packages (`@scope/name`) contain one extra separator, which the
 * `@` case accounts for by bumping the counter.
 * @param {string} managedPath managed path including trailing separator
 * @param {string} path a path inside the managed path
 * @returns {string} the package root directory
 */
const getManagedItem = (managedPath, path) => {
	const remaining = path.slice(managedPath.length);
	let i = 0;
	let slashes = 1;
	loop: while (i < remaining.length) {
		switch (remaining.charCodeAt(i)) {
			case 47: // slash
			case 92: // backslash
				if (--slashes === 0) break loop;
				break;
			case 64: // @
				slashes++;
				break;
		}
		i++;
	}
	return path.slice(0, managedPath.length + i);
};
class FileSystemInfo {
/**
* @param {InputFileSystem} fs file system
* @param {Object} options options
* @param {Iterable<string>=} options.managedPaths paths that are only managed by a package manager
*/
constructor(fs) {
constructor(fs, { managedPaths = [] } = {}) {
	this.fs = fs;
	// Caches for the reader methods below. `null` entries mean
	// "path does not exist" (negative result is cached too).
	/** @type {Map<string, FileSystemInfoEntry | null>} */
	this._fileTimestamps = new Map();
	/** @type {Map<string, string>} */
	this._fileHashes = new Map();
	/** @type {Map<string, FileSystemInfoEntry | null>} */
	this._contextTimestamps = new Map();
	/** @type {Map<string, string>} */
	this._contextHashes = new Map();
	// Maps package root directory -> "name@version" identity string.
	/** @type {Map<string, string>} */
	this._managedItems = new Map();
	// Deduplicating work queues: concurrent requests for the same path are
	// coalesced; `parallelism` bounds concurrent fs operations per kind.
	this.fileTimestampQueue = new AsyncQueue({
		name: "file timestamp",
		parallelism: 30,
		processor: this._readFileTimestamp.bind(this)
	});
	this.fileHashQueue = new AsyncQueue({
		name: "file hash",
		parallelism: 10,
		processor: this._readFileHash.bind(this)
	});
	this.contextTimestampQueue = new AsyncQueue({
		name: "context timestamp",
		parallelism: 2,
		processor: this._readContextTimestamp.bind(this)
	});
	this.contextHashQueue = new AsyncQueue({
		name: "context hash",
		parallelism: 2,
		processor: this._readContextHash.bind(this)
	});
	this.managedItemQueue = new AsyncQueue({
		name: "managed item info",
		parallelism: 10,
		processor: this._getManagedItemInfo.bind(this)
	});
	this.managedPaths = Array.from(managedPaths);
	// Normalize each managed path to end with the fs-specific separator
	// (join with "_" then drop it), so plain `path.startsWith(...)` prefix
	// checks during snapshotting cannot match partial directory names.
	this.managedPathsWithSlash = this.managedPaths.map(p =>
		join(fs, p, "_").slice(0, -1)
	);
}
/**
@ -98,6 +164,193 @@ class FileSystemInfo {
this.contextTimestampQueue.add(path, callback);
}
/**
* @param {string} path file path
* @param {function(WebpackError=, string=): void} callback callback function
* @returns {void}
*/
getFileHash(path, callback) {
const cache = this._fileHashes.get(path);
if (cache !== undefined) return callback(null, cache);
this.fileHashQueue.add(path, callback);
}
/**
* @param {string} path context path
* @param {function(WebpackError=, string=): void} callback callback function
* @returns {void}
*/
getContextHash(path, callback) {
const cache = this._contextHashes.get(path);
if (cache !== undefined) return callback(null, cache);
this.contextHashQueue.add(path, callback);
}
/**
 * Resolves the given build dependencies and, transitively, everything they
 * require (via node's module system), collecting the involved files and
 * directories.
 * @param {string} context directory used for resolving the initial deps
 * @param {Iterable<string>} deps dependencies to resolve; entries ending in
 * a slash/backslash are resolved as directories; entries prefixed with
 * `deps:` contribute only the dependencies of the resolved target, not the
 * target itself
 * @param {function(Error=, Object=): void} callback called with
 * `{ files, directories, missing }` sets of absolute real paths
 * @returns {void}
 */
resolveBuildDependencies(context, deps, callback) {
	// Results; realpaths are stored so symlinked locations deduplicate.
	const files = new Set();
	const directories = new Set();
	// NOTE(review): `missing` is returned but never populated anywhere in
	// this method — confirm whether missing-path tracking is intentionally
	// deferred.
	const missing = new Set();
	// Work queue (concurrency 50). Each task is tagged with a `type` that
	// selects one of the processing steps in the switch below; steps feed
	// follow-up tasks back into the queue.
	const queue = asyncLib.queue(({ type, context, path }, callback) => {
		switch (type) {
			case "resolve": {
				// Trailing separator => directory request; `deps:` prefix is
				// stripped and routed to the *-dependencies task types.
				const isDirectory = /[\\/]$/.test(path);
				const isDeps = /^deps:/.test(path);
				if (isDeps) path = path.slice(5);
				if (isDirectory) {
					resolveContext(
						context,
						path.replace(/[\\/]$/, ""),
						(err, result) => {
							if (err) return callback(err);
							queue.push({
								type: isDeps ? "directory-dependencies" : "directory",
								path: result
							});
							callback();
						}
					);
				} else {
					resolve(context, path, (err, result) => {
						if (err) return callback(err);
						queue.push({
							type: isDeps ? "file-dependencies" : "file",
							path: result
						});
						callback();
					});
				}
				break;
			}
			case "resolve-directory": {
				// Resolve a package name (from package.json dependencies)
				// relative to the requesting package's directory.
				resolveContext(context, path, (err, result) => {
					if (err) return callback(err);
					queue.push({
						type: "directory",
						path: result
					});
					callback();
				});
				break;
			}
			case "file": {
				if (files.has(path)) {
					callback();
					break;
				}
				// Record the realpath, then walk the file's own requires.
				this.fs.realpath(path, (err, realPath) => {
					if (err) return callback(err);
					if (!files.has(realPath)) {
						files.add(realPath);
						queue.push({
							type: "file-dependencies",
							path: realPath
						});
					}
					callback();
				});
				break;
			}
			case "directory": {
				if (directories.has(path)) {
					callback();
					break;
				}
				this.fs.realpath(path, (err, realPath) => {
					if (err) return callback(err);
					if (!directories.has(realPath)) {
						directories.add(realPath);
						queue.push({
							type: "directory-dependencies",
							path: realPath
						});
					}
					callback();
				});
				break;
			}
			case "file-dependencies": {
				// Use node's require.cache to discover the modules this file
				// has loaded at runtime.
				const module = require.cache[path];
				if (module && Array.isArray(module.children)) {
					for (const child of module.children) {
						if (child.id) {
							queue.push({
								type: "file",
								path: child.id
							});
						}
					}
				} else {
					// Unable to get dependencies from module system
					// This may be because of an incomplete require.cache implementation like in jest
					// Assume requires stay in directory and add the whole directory
					const directory = dirname(this.fs, path);
					queue.push({
						type: "directory",
						path: directory
					});
				}
				callback();
				break;
			}
			case "directory-dependencies": {
				// Locate the containing package root (scoped packages get an
				// extra `@scope/` segment), read its package.json and queue
				// every declared dependency for resolution.
				const match = /(^.+[\\/]node_modules[\\/](?:@[^\\/]+[\\/])?[^\\/]+)/.exec(
					path
				);
				const packagePath = match ? match[1] : path;
				const packageJson = join(this.fs, packagePath, "package.json");
				this.fs.readFile(packageJson, (err, content) => {
					if (err) {
						if (err.code === "ENOENT") {
							// No package.json here — retry one directory up.
							const parent = dirname(this.fs, packagePath);
							if (parent !== packagePath) {
								queue.push({
									type: "directory-dependencies",
									path: parent
								});
							}
							callback();
							return;
						}
						return callback(err);
					}
					let packageData;
					try {
						packageData = JSON.parse(content.toString("utf-8"));
					} catch (e) {
						return callback(e);
					}
					const depsObject = packageData.dependencies;
					if (typeof depsObject === "object" && depsObject) {
						for (const dep of Object.keys(depsObject)) {
							queue.push({
								type: "resolve-directory",
								context: packagePath,
								path: dep
							});
						}
					}
					callback();
				});
				break;
			}
		}
	}, 50);
	queue.drain = () => {
		callback(null, { files, directories, missing });
	};
	queue.error = err => {
		// Report the first error, then disarm the callback so that a later
		// drain or further worker errors cannot invoke it a second time.
		callback(err);
		callback = () => {};
	};
	// NOTE(review): if `deps` is empty the queue never drains and the
	// callback is never invoked — verify callers always pass at least one
	// dependency.
	for (const dep of deps) {
		queue.push({
			type: "resolve",
			context,
			path: dep
		});
	}
}
/**
*
* @param {number} startTime when processing the files has started
@ -111,47 +364,130 @@ class FileSystemInfo {
createSnapshot(startTime, files, directories, missing, options, callback) {
/** @type {Map<string, FileSystemInfoEntry | "error">} */
const fileTimestamps = new Map();
/** @type {Map<string, string | "error">} */
const fileHashes = new Map();
/** @type {Map<string, FileSystemInfoEntry | "error">} */
const contextTimestamps = new Map();
/** @type {Map<string, string | "error">} */
const contextHashes = new Map();
/** @type {Map<string, FileSystemInfoEntry | "error">} */
const missingTimestamps = new Map();
/** @type {Map<string, string | "error">} */
const managedItemInfo = new Map();
const managedItems = new Set();
let jobs = 1;
const jobDone = () => {
if (--jobs === 0) {
callback(null, {
startTime,
fileTimestamps,
contextTimestamps,
missingTimestamps
});
const snapshot = {};
if (startTime) snapshot.startTime = startTime;
if (fileTimestamps.size !== 0) snapshot.fileTimestamps = fileTimestamps;
if (fileHashes.size !== 0) snapshot.fileHashes = fileHashes;
if (contextTimestamps.size !== 0)
snapshot.contextTimestamps = contextTimestamps;
if (contextHashes.size !== 0) snapshot.contextHashes = contextHashes;
if (missingTimestamps.size !== 0)
snapshot.missingTimestamps = missingTimestamps;
if (managedItemInfo.size !== 0)
snapshot.managedItemInfo = managedItemInfo;
callback(null, snapshot);
}
};
if (files) {
for (const path of files) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
fileTimestamps.set(path, cache);
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) {
fileTimestamps.set(path, "error");
} else {
fileTimestamps.set(path, entry);
if (options && options.hash) {
files: for (const path of files) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue files;
}
jobDone();
});
}
const cache = this._fileHashes.get(path);
if (cache !== undefined) {
fileHashes.set(path, cache);
} else {
jobs++;
this.fileHashQueue.add(path, (err, entry) => {
if (err) {
fileHashes.set(path, "error");
} else {
fileHashes.set(path, entry);
}
jobDone();
});
}
}
} else {
files: for (const path of files) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue files;
}
}
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
fileTimestamps.set(path, cache);
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) {
fileTimestamps.set(path, "error");
} else {
fileTimestamps.set(path, entry);
}
jobDone();
});
}
}
}
}
if (directories) {
for (const path of directories) {
contextTimestamps.set(path, "error");
// TODO: getContextTimestamp is not implemented yet
if (options && options.hash) {
directories: for (const path of directories) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue directories;
}
}
const cache = this._contextHashes.get(path);
if (cache !== undefined) {
contextHashes.set(path, cache);
} else {
jobs++;
this.contextHashQueue.add(path, (err, entry) => {
if (err) {
contextHashes.set(path, "error");
} else {
contextHashes.set(path, entry);
}
jobDone();
});
}
}
} else {
directories: for (const path of directories) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue directories;
}
}
contextTimestamps.set(path, "error");
// TODO: getContextTimestamp is not implemented yet
}
}
}
if (missing) {
for (const path of missing) {
missing: for (const path of missing) {
for (const managedPath of this.managedPathsWithSlash) {
if (path.startsWith(managedPath)) {
managedItems.add(getManagedItem(managedPath, path));
continue missing;
}
}
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
missingTimestamps.set(path, cache);
@ -168,9 +504,76 @@ class FileSystemInfo {
}
}
}
for (const path of managedItems) {
const cache = this._managedItems.get(path);
if (cache !== undefined) {
managedItemInfo.set(path, cache);
} else {
jobs++;
this.managedItemQueue.add(path, (err, entry) => {
if (err) {
managedItemInfo.set(path, "error");
} else {
managedItemInfo.set(path, entry);
}
jobDone();
});
}
}
jobDone();
}
/**
* @param {Snapshot} snapshot1 a snapshot
* @param {Snapshot} snapshot2 a snapshot
* @returns {Snapshot} merged snapshot
*/
mergeSnapshots(snapshot1, snapshot2) {
/** @type {Snapshot} */
const snapshot = {};
if (snapshot1.startTime && snapshot2.startTime)
snapshot.startTime = Math.min(snapshot1.startTime, snapshot2.startTime);
else if (snapshot2.startTime) snapshot.startTime = snapshot2.startTime;
else if (snapshot1.startTime) snapshot.startTime = snapshot1.startTime;
if (snapshot1.fileTimestamps || snapshot2.fileTimestamps) {
snapshot.fileTimestamps = mergeMaps(
snapshot1.fileTimestamps,
snapshot2.fileTimestamps
);
}
if (snapshot1.fileHashes || snapshot2.fileHashes) {
snapshot.fileHashes = mergeMaps(
snapshot1.fileHashes,
snapshot2.fileHashes
);
}
if (snapshot1.contextTimestamps || snapshot2.contextTimestamps) {
snapshot.contextTimestamps = mergeMaps(
snapshot1.contextTimestamps,
snapshot2.contextTimestamps
);
}
if (snapshot1.contextHashes || snapshot2.contextHashes) {
snapshot.contextHashes = mergeMaps(
snapshot1.contextHashes,
snapshot2.contextHashes
);
}
if (snapshot1.missingTimestamps || snapshot2.missingTimestamps) {
snapshot.missingTimestamps = mergeMaps(
snapshot1.missingTimestamps,
snapshot2.missingTimestamps
);
}
if (snapshot1.managedItemInfo || snapshot2.managedItemInfo) {
snapshot.managedItemInfo = mergeMaps(
snapshot1.managedItemInfo,
snapshot2.managedItemInfo
);
}
return snapshot;
}
/**
* @param {Snapshot} snapshot the snapshot made
* @param {function(WebpackError=, boolean=): void} callback callback function
@ -180,7 +583,9 @@ class FileSystemInfo {
const {
startTime,
fileTimestamps,
fileHashes,
contextTimestamps,
contextHashes,
missingTimestamps
} = snapshot;
let jobs = 1;
@ -195,6 +600,14 @@ class FileSystemInfo {
callback(null, false);
}
};
const checkHash = (current, snap) => {
if (snap === "error") {
// If there was an error while snapshotting (i. e. EBUSY)
// we can't compare further data and assume it's invalid
return false;
}
return current === snap;
};
/**
* @param {FileSystemInfoEntry} current current entry
* @param {FileSystemInfoEntry | "error"} snap entry from snapshot
@ -242,51 +655,93 @@ class FileSystemInfo {
}
return true;
};
for (const [path, ts] of fileTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkFile(cache, ts)) {
invalid();
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkFile(entry, ts)) {
if (fileTimestamps) {
for (const [path, ts] of fileTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkFile(cache, ts)) {
invalid();
} else {
jobDone();
}
});
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkFile(entry, ts)) {
invalid();
} else {
jobDone();
}
});
}
}
}
if (contextTimestamps.size > 0) {
if (fileHashes) {
for (const [path, hash] of fileHashes) {
const cache = this._fileHashes.get(path);
if (cache !== undefined) {
if (!checkHash(cache, hash)) {
invalid();
}
} else {
jobs++;
this.fileHashQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, hash)) {
invalid();
} else {
jobDone();
}
});
}
}
}
if (contextTimestamps && contextTimestamps.size > 0) {
// TODO: getContextTimestamp is not implemented yet
invalid();
}
for (const [path, ts] of missingTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkExistance(cache, ts)) {
invalid();
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkExistance(entry, ts)) {
if (contextHashes) {
for (const [path, hash] of contextHashes) {
const cache = this._contextHashes.get(path);
if (cache !== undefined) {
if (!checkHash(cache, hash)) {
invalid();
} else {
jobDone();
}
});
} else {
jobs++;
this.contextHashQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, hash)) {
invalid();
} else {
jobDone();
}
});
}
}
}
if (missingTimestamps) {
for (const [path, ts] of missingTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkExistance(cache, ts)) {
invalid();
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkExistance(entry, ts)) {
invalid();
} else {
jobDone();
}
});
}
}
}
jobDone();
}
// TODO getFileHash(path, callback)
_readFileTimestamp(path, callback) {
this.fs.stat(path, (err, stat) => {
if (err) {
@ -312,12 +767,99 @@ class FileSystemInfo {
});
}
_readFileHash(path, callback) {
this.fs.readFile(path, (err, content) => {
if (err) {
if (err.code === "ENOENT") {
this._fileHashes.set(path, null);
return callback(null, null);
}
return callback(err);
}
const hash = createHash("md4");
hash.update(content);
const digest = /** @type {string} */ (hash.digest("hex"));
this._fileHashes.set(path, digest);
callback(null, digest);
});
}
// Stub: directory timestamping is not implemented yet, so contexts are
// cached as `null` (treated as "unknown"); snapshot validation reacts by
// invalidating whenever context timestamps are present.
_readContextTimestamp(path, callback) {
	// TODO read whole folder
	this._contextTimestamps.set(path, null);
	callback(null, null);
}
_readContextHash(path, callback) {
this.fs.readdir(path, (err, files) => {
if (err) {
if (err.code === "ENOENT") {
this._contextHashes.set(path, null);
return callback(null, null);
}
return callback(err);
}
files = files
.map(file => file.normalize("NFC"))
.filter(file => !/^\./.test(file))
.sort();
asyncLib.map(
files,
(file, callback) => {
const child = join(this.fs, path, file);
this.fs.stat(child, (err, stat) => {
if (err) return callback(err);
if (stat.isFile()) {
return this.getFileHash(child, callback);
}
if (stat.isDirectory()) {
this.contextHashQueue.increaseParallelism();
this.getContextHash(child, (err, hash) => {
this.contextHashQueue.decreaseParallelism();
callback(err, hash || "");
});
return;
}
callback(null, "");
});
},
(err, fileHashes) => {
const hash = createHash("md4");
for (const file of files) hash.update(file);
for (const h of fileHashes) hash.update(h);
const digest = /** @type {string} */ (hash.digest("hex"));
this._contextHashes.set(path, digest);
callback(null, digest);
}
);
});
}
_getManagedItemInfo(path, callback) {
const packageJsonPath = join(this.fs, path, "package.json");
this.fs.readFile(packageJsonPath, (err, content) => {
if (err) return callback(err);
let data;
try {
data = JSON.parse(content.toString("utf-8"));
} catch (e) {
return callback(e);
}
const info = `${data.name || ""}@${data.version || ""}`;
callback(null, info);
});
}
getDeprecatedFileTimestamps() {
const map = new Map();
for (const [path, info] of this._fileTimestamps) {

View File

@ -540,6 +540,8 @@ class WebpackOptionsApply extends OptionsApply {
if (options.cache && typeof options.cache === "object") {
const cacheOptions = options.cache;
const AddManagedPathsPlugin = require("./cache/AddManagedPathsPlugin");
new AddManagedPathsPlugin(cacheOptions.managedPaths).apply(compiler);
switch (cacheOptions.type) {
case "memory": {
const MemoryCachePlugin = require("./cache/MemoryCachePlugin");
@ -548,7 +550,7 @@ class WebpackOptionsApply extends OptionsApply {
}
case "filesystem": {
const AddBuildDependenciesPlugin = require("./cache/AddBuildDependenciesPlugin");
for(const key in cacheOptions.buildDependencies) {
for (const key in cacheOptions.buildDependencies) {
const list = cacheOptions.buildDependencies[key];
new AddBuildDependenciesPlugin(list).apply(compiler);
}
@ -611,11 +613,13 @@ class WebpackOptionsApply extends OptionsApply {
new IdleFileCachePlugin(
new PackFileCacheStrategy({
fs: compiler.intermediateFileSystem,
context: options.context,
cacheLocation: cacheOptions.cacheLocation,
version: cacheOptions.version,
logger: compiler.getInfrastructureLogger(
"webpack.cache.PackFileCacheStrategy"
)
),
managedPaths: cacheOptions.managedPaths
}),
cacheOptions.idleTimeout,
cacheOptions.idleTimeoutForInitialStore

View File

@ -91,13 +91,20 @@ class WebpackOptionsDefaulter extends OptionsDefaulter {
}
value.buildDependencies = Object.assign({}, value.buildDependencies);
if (value.buildDependencies.defaultWebpack === undefined) {
value.buildDependencies.defaultWebpack = [
path.resolve(__dirname, "..") + path.sep
];
value.buildDependencies.defaultWebpack = [__dirname + path.sep];
}
}
return value;
});
this.set("cache.managedPaths", "make", () => {
	// Default the managed paths to the node_modules directory webpack was
	// installed into, located via an always-present webpack dependency.
	const match = /^(.+?[\\/]node_modules[\\/])/.exec(
		// eslint-disable-next-line node/no-extraneous-require
		require.resolve("watchpack")
	);
	if (match) {
		return [match[1]];
	}
	// Implicitly returns undefined when webpack is not inside node_modules
	// (e.g. a linked checkout) — no paths are treated as managed then.
});
this.set("context", process.cwd());
this.set("target", "web");

29
lib/cache/AddManagedPathsPlugin.js vendored Normal file
View File

@ -0,0 +1,29 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class AddManagedPathsPlugin {
	/**
	 * @param {Iterable<string>} managedPaths list of managed paths
	 */
	constructor(managedPaths) {
		// Deduplicate up-front; order is irrelevant for prefix matching.
		this.managedPaths = new Set(managedPaths);
	}

	/**
	 * Registers every configured managed path on the compiler so that
	 * snapshotting can shortcut package-manager-controlled directories.
	 * @param {Compiler} compiler Webpack compiler
	 * @returns {void}
	 */
	apply(compiler) {
		this.managedPaths.forEach(managedPath => {
			compiler.managedPaths.add(managedPath);
		});
	}
}
module.exports = AddManagedPathsPlugin;

View File

@ -9,6 +9,8 @@ const Cache = require("../Cache");
/** @typedef {import("../Compiler")} Compiler */
const BUILD_DEPENDENCIES_KEY = Symbol();
class IdleFileCachePlugin {
/**
* @param {TODO} strategy cache strategy
@ -35,7 +37,7 @@ class IdleFileCachePlugin {
const resolvedPromise = Promise.resolve();
/** @type {Map<string, () => Promise>} */
/** @type {Map<string | typeof BUILD_DEPENDENCIES_KEY, () => Promise>} */
const pendingIdleTasks = new Map();
compiler.cache.hooks.store.tap(
@ -67,6 +69,15 @@ class IdleFileCachePlugin {
}
);
compiler.cache.hooks.storeBuildDependencies.tap(
{ name: "IdleFile", stage: Cache.STAGE_DISK },
dependencies => {
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () =>
strategy.storeBuildDependencies(dependencies)
);
}
);
compiler.cache.hooks.shutdown.tapPromise(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
() => {

View File

@ -5,6 +5,7 @@
"use strict";
const FileSystemInfo = require("../FileSystemInfo");
const makeSerializable = require("../util/makeSerializable");
const {
createFileSerializer,
@ -21,6 +22,7 @@ class Pack {
this.etags = new Map();
/** @type {Map<string, any | (() => Promise<PackEntry>)>} */
this.content = new Map();
this.buildSnapshot = undefined;
this.lastAccess = new Map();
this.lastSizes = new Map();
this.unserializable = new Set();
@ -78,6 +80,7 @@ class Pack {
write(this.etags);
write(this.unserializable);
write(this.lastAccess);
write(this.buildSnapshot);
for (const [identifier, data] of this.content) {
write(identifier);
if (typeof data === "function") {
@ -101,6 +104,7 @@ class Pack {
this.etags = read();
this.unserializable = read();
this.lastAccess = read();
this.buildSnapshot = read();
this.content = new Map();
let identifier = read();
while (identifier !== null) {
@ -191,8 +195,10 @@ makeSerializable(
);
class PackFileCacheStrategy {
constructor({ fs, cacheLocation, version, logger }) {
constructor({ fs, context, cacheLocation, version, logger, managedPaths }) {
this.fileSerializer = createFileSerializer(fs);
this.fileSystemInfo = new FileSystemInfo(fs, { managedPaths });
this.context = context;
this.cacheLocation = cacheLocation;
this.logger = logger;
logger.time("restore pack");
@ -213,7 +219,26 @@ class PackFileCacheStrategy {
);
return new Pack(version, logger);
}
return cacheEntry;
if (!cacheEntry.buildSnapshot) {
return cacheEntry;
}
return new Promise((resolve, reject) => {
logger.time("check build dependencies");
this.fileSystemInfo.checkSnapshotValid(
cacheEntry.buildSnapshot,
(err, valid) => {
if (err) return reject(err);
logger.timeEnd("check build dependencies");
if (!valid) {
logger.log(
`Restored pack from ${cacheLocation}.pack, but build dependencies have changed.`
);
return resolve(new Pack(version, logger));
}
return resolve(cacheEntry);
}
);
});
}
return new Pack(version, logger);
})
@ -248,6 +273,49 @@ class PackFileCacheStrategy {
});
}
/**
 * Resolves the given build dependencies (including transitive requires),
 * snapshots them with content hashes (no meaningful start time exists for
 * build deps, hence `startTime: undefined` and `{ hash: true }`), and
 * merges the snapshot into the pack's `buildSnapshot` so a later restore
 * can validate whether build dependencies changed.
 * @param {Iterable<string>} dependencies build dependencies to store
 * @returns {Promise} resolves when the snapshot is attached to the pack
 */
storeBuildDependencies(dependencies) {
	this.logger.debug("Storing build dependencies...");
	return new Promise((resolve, reject) => {
		this.logger.time("resolve build dependencies");
		this.fileSystemInfo.resolveBuildDependencies(
			this.context,
			dependencies,
			(err, result) => {
				if (err) return reject(err);
				this.logger.timeEnd("resolve build dependencies");
				this.logger.time("snapshot build dependencies");
				const { files, directories, missing } = result;
				this.fileSystemInfo.createSnapshot(
					undefined,
					files,
					directories,
					missing,
					{ hash: true },
					(err, snapshot) => {
						if (err) return reject(err);
						this.logger.timeEnd("snapshot build dependencies");
						this.logger.debug("Stored build dependencies");
						// Resolve with the pack update itself so callers wait
						// until the snapshot is actually attached.
						resolve(
							this.packPromise.then(pack => {
								if (pack.buildSnapshot) {
									// Multiple store calls accumulate into one
									// merged snapshot.
									pack.buildSnapshot = this.fileSystemInfo.mergeSnapshots(
										pack.buildSnapshot,
										snapshot
									);
								} else {
									pack.buildSnapshot = snapshot;
								}
							})
						);
					}
				);
			}
		);
	});
}
afterAllStored() {
return this.packPromise.then(pack => {
if (!pack.invalid) return;

View File

@ -85,6 +85,8 @@ class SeparateFilesCacheStrategy {
});
}
storeBuildDependencies() {}
afterAllStored() {}
}

View File

@ -9,6 +9,8 @@ const path = require("path");
/** @typedef {function(NodeJS.ErrnoException=): void} Callback */
/** @typedef {function(NodeJS.ErrnoException=, Buffer=): void} BufferCallback */
/** @typedef {function(NodeJS.ErrnoException=, string[]=): void} StringArrayCallback */
/** @typedef {function(NodeJS.ErrnoException=, string=): void} StringCallback */
/** @typedef {function(NodeJS.ErrnoException=, import("fs").Stats=): void} StatsCallback */
/**
@ -31,7 +33,9 @@ const path = require("path");
/**
* @typedef {Object} InputFileSystem
* @property {function(string, BufferCallback): void} readFile
* @property {function(string, StringArrayCallback): void} readdir
* @property {function(string, StatsCallback): void} stat
* @property {(function(string, StringCallback): void)=} realpath
* @property {(function(string=): void)=} purge
* @property {(function(string, string): string)=} join
* @property {(function(string, string): string)=} relative

View File

@ -231,6 +231,16 @@
"type": "number",
"minimum": 0
},
"managedPaths": {
"description": "List of paths that are managed by a package manager and can be trusted to not being modified otherwise",
"type": "array",
"items": {
"description": "A path to a managed directory (usually a node_modules directory)",
"type": "string",
"absolutePath": true,
"minLength": 1
}
},
"name": {
"description": "Name for the cache. Different names will lead to different coexisting caches.",
"type": "string"
@ -312,6 +322,16 @@
"type": "object",
"additionalProperties": false,
"properties": {
"managedPaths": {
"description": "List of paths that are managed by a package manager and can be trusted to not being modified otherwise",
"type": "array",
"items": {
"description": "A path to a managed directory (usually a node_modules directory)",
"type": "string",
"absolutePath": true,
"minLength": 1
}
},
"type": {
"description": "In memory caching",
"enum": ["memory"]