add persistent caching prototype

Tobias Koppers 2018-10-09 14:30:59 +02:00
parent 518104b3fb
commit a852828279
49 changed files with 2222 additions and 55 deletions

.gitignore (vendored)

@ -10,5 +10,6 @@
*.log
.idea
.vscode
.cache
.eslintcache
package-lock.json

declarations/WebpackOptions.d.ts

@ -257,11 +257,7 @@ export interface WebpackOptions {
/**
* Cache generated modules and chunks to improve performance for multiple incremental builds.
*/
cache?:
| boolean
| {
[k: string]: any;
};
cache?: false | true | MemoryCacheOptions | FileCacheOptions;
/**
* The base directory (absolute path!) for resolving the `entry` option. If `output.pathinfo` is set, the included pathinfo is shortened to this directory.
*/
@ -408,6 +404,54 @@ export interface WebpackOptions {
stdin?: boolean;
};
}
/**
* This interface was referenced by `WebpackOptions`'s JSON-Schema
* via the `definition` "MemoryCacheOptions".
*/
export interface MemoryCacheOptions {
/**
* In memory caching
*/
type: "memory";
}
/**
* This interface was referenced by `WebpackOptions`'s JSON-Schema
* via the `definition` "FileCacheOptions".
*/
export interface FileCacheOptions {
/**
* Base directory for the cache (defaults to node_modules/.cache/webpack).
*/
cacheDirectory?: string;
/**
* Algorithm used for generating the hash (see the Node.js crypto package)
*/
hashAlgorithm?: string;
/**
* Display log info when the cache is accessed.
*/
log?: boolean;
/**
* Name for the cache. Different names will lead to different coexisting caches.
*/
name?: string;
/**
* When to store data to the filesystem. (idle: store data when the compiler is idle; background: store data in the background while compiling, without blocking the compilation; instant: store data as soon as it is created, blocking the compilation until it is stored; defaults to idle)
*/
store?: "idle" | "background" | "instant";
/**
* Filesystem caching
*/
type: "filesystem";
/**
* Version of the cache data. A different version won't allow reusing the cache; existing content will be overridden. Update the version when the configuration has changed in a way that doesn't allow the cache to be reused. This invalidates the cache.
*/
version?: string;
/**
* Display warnings when (de)serialization of data fails.
*/
warn?: boolean;
}
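For reference, a minimal sketch of how these options surface in a user configuration (values and paths here are illustrative, not taken from the commit; `cache: true` or `cache: { type: "memory" }` selects the in-memory variant instead):

``` javascript
// webpack.config.js — hedged sketch exercising the FileCacheOptions fields above
module.exports = {
	mode: "production",
	cache: {
		type: "filesystem",
		cacheDirectory: "/absolute/path/to/.cache/webpack", // must be absolute per the schema
		name: "production", // different names lead to different coexisting caches
		version: "config-v2", // bump when the config changes incompatibly
		hashAlgorithm: "md4", // any algorithm supported by Node's crypto module
		store: "idle", // "idle" | "background" | "instant"
		log: false,
		warn: true
	}
};
```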
/**
* Multiple entry bundles are created. The key is the chunk name. The value can be a string or an array.
*

README.md (new file, persistent caching example)

@ -0,0 +1,39 @@
# example.js
``` javascript
console.log(process.env.NODE_ENV);
import "react";
import "react-dom";
import "acorn";
import "core-js";
import "date-fns";
```
# webpack.config.js
``` javascript
const path = require("path");
module.exports = (env = "development") => ({
mode: env,
cache: {
type: "filesystem",
name: env,
cacheDirectory: path.resolve(__dirname, ".cache"),
warn: true
}
});
```
# Info
```
Hash: 0a1b2c3d4e5f6a7b8c9d
Version: webpack 5.0.0-next
Asset Size Chunks Chunk Names
output.js 1.34 MiB 0 [emitted] main
Entrypoint main = output.js
chunk {0} output.js (main) 1.19 MiB [entry]
> .\example.js main
670 modules
```

build.js (new file, persistent caching example)

@ -0,0 +1 @@
require("../build-common");

example.js (new file, persistent caching example)

@ -0,0 +1,7 @@
console.log(process.env.NODE_ENV);
import "react";
import "react-dom";
import "acorn";
import "core-js";
import "date-fns";

template.md (new file, persistent caching example)

@ -0,0 +1,17 @@
# example.js
``` javascript
{{example.js}}
```
# webpack.config.js
``` javascript
{{webpack.config.js}}
```
# Info
```
{{stdout}}
```

webpack.config.js (new file, persistent caching example)

@ -0,0 +1,10 @@
const path = require("path");
module.exports = (env = "development") => ({
mode: env,
cache: {
type: "filesystem",
name: env,
cacheDirectory: path.resolve(__dirname, ".cache"),
warn: true
}
});

lib/Cache.js

@ -7,9 +7,6 @@
const { AsyncParallelHook, AsyncSeriesBailHook, SyncHook } = require("tapable");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("./Module")} Module */
class Cache {
constructor() {
this.hooks = {
@ -30,8 +27,8 @@ class Cache {
this.hooks.get.callAsync(identifier, etag, callback);
}
store(identifier, etag, source, callback) {
this.hooks.store.callAsync(identifier, etag, source, callback);
store(identifier, etag, data, callback) {
this.hooks.store.callAsync(identifier, etag, data, callback);
}
beginIdle() {

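For orientation, a hedged sketch of how a cache backend taps the hooks exposed by `Cache` above — the real `MemoryCachePlugin` and `FileCachePlugin` in this commit follow the same `compiler.cache.hooks.get`/`store` pattern; the class name here is made up:

``` javascript
// Minimal in-memory cache backend (illustrative only)
class TinyMemoryCachePlugin {
	apply(compiler) {
		const cache = new Map();
		compiler.cache.hooks.store.tapPromise(
			"TinyMemoryCachePlugin",
			(identifier, etag, data) => {
				cache.set(identifier, { etag, data });
				return Promise.resolve();
			}
		);
		compiler.cache.hooks.get.tapPromise(
			"TinyMemoryCachePlugin",
			(identifier, etag) => {
				const entry = cache.get(identifier);
				// resolving undefined means "cache miss", letting the compilation proceed
				return Promise.resolve(
					entry !== undefined && entry.etag === etag ? entry.data : undefined
				);
			}
		);
	}
}
```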
lib/Compilation.js

@ -498,10 +498,22 @@ class Compilation {
return callback(null, alreadyAddedModule);
}
const currentProfile = this.profile
? this.moduleGraph.getProfile(module)
: undefined;
if (currentProfile !== undefined) {
currentProfile.markRestoringStart();
}
const cacheName = `${this.compilerPath}/module/${identifier}`;
this.cache.get(cacheName, null, (err, cacheModule) => {
if (err) return callback(err);
if (currentProfile !== undefined) {
currentProfile.markRestoringEnd();
currentProfile.markIntegrationStart();
}
if (cacheModule) {
cacheModule.updateCacheModule(module);
@ -510,6 +522,9 @@ class Compilation {
this._modules.set(identifier, module);
this.modules.add(module);
ModuleGraph.setModuleGraphForModule(module, this.moduleGraph);
if (currentProfile !== undefined) {
currentProfile.markIntegrationEnd();
}
callback(null, module);
});
}
@ -757,6 +772,10 @@ class Compilation {
return callback();
}
if (currentProfile !== undefined) {
moduleGraph.setProfile(newModule, currentProfile);
}
this.addModule(newModule, (err, module) => {
if (err) {
if (!err.module) {
@ -772,17 +791,20 @@ class Compilation {
moduleGraph.setResolvedModule(originModule, dependency, module);
}
if (module === newModule) {
if (moduleGraph.getIssuer(module) === undefined) {
moduleGraph.setIssuer(
module,
originModule !== undefined ? originModule : null
);
}
if (module !== newModule) {
if (currentProfile !== undefined) {
moduleGraph.setProfile(module, currentProfile);
}
if (originModule !== undefined) {
moduleGraph.setIssuer(module, originModule);
}
} else {
if (currentProfile !== undefined) {
currentProfile.mergeInto(moduleGraph.getProfile(module));
const otherProfile = moduleGraph.getProfile(module);
if (otherProfile !== undefined) {
currentProfile.mergeInto(otherProfile);
} else {
moduleGraph.setProfile(module, currentProfile);
}
}
}
@ -2144,7 +2166,11 @@ class Compilation {
source,
chunk
});
this.cache.store(cacheName, usedHash, source, callback);
if (source !== sourceFromCache) {
this.cache.store(cacheName, usedHash, source, callback);
} else {
callback();
}
} catch (err) {
this.errors.push(
new ChunkRenderError(chunk, file || filenameTemplate, err)

lib/DependenciesBlock.js

@ -5,6 +5,8 @@
"use strict";
const makeSerializable = require("./util/makeSerializable");
/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */
/** @typedef {import("./ChunkGraph")} ChunkGraph */
/** @typedef {import("./ChunkGroup")} ChunkGroup */
@ -84,6 +86,18 @@ class DependenciesBlock {
return false;
}
serialize({ write }) {
write(this.dependencies);
write(this.blocks);
}
deserialize({ read }) {
this.dependencies = read();
this.blocks = read();
}
}
makeSerializable(DependenciesBlock, "webpack/lib/DependenciesBlock");
module.exports = DependenciesBlock;

lib/Dependency.js

@ -127,6 +127,18 @@ class Dependency {
getNumberOfIdOccurrences() {
return 1;
}
serialize({ write }) {
write(this.weak);
write(this.optional);
write(this.loc);
}
deserialize({ read }) {
this.weak = read();
this.optional = read();
this.loc = read();
}
}
Object.defineProperty(Dependency.prototype, "module", {

lib/Module.js

@ -10,6 +10,7 @@ const DependenciesBlock = require("./DependenciesBlock");
const ModuleGraph = require("./ModuleGraph");
const Template = require("./Template");
const { compareChunksById } = require("./util/comparators");
const makeSerializable = require("./util/makeSerializable");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("./Chunk")} Chunk */
@ -582,8 +583,10 @@ class Module extends DependenciesBlock {
* @returns {void}
*/
updateCacheModule(module) {
// do nothing
// this method can be overridden
this.type = module.type;
this.context = module.context;
this.factoryMeta = module.factoryMeta;
this.resolveOptions = module.resolveOptions;
}
/**
@ -592,8 +595,38 @@ class Module extends DependenciesBlock {
originalSource() {
return null;
}
serialize(context) {
const { write } = context;
write(this.type);
write(this.context);
write(this.resolveOptions);
write(this.factoryMeta);
write(this.useSourceMap);
write(this.warnings);
write(this.errors);
write(this.buildMeta);
write(this.buildInfo);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.type = read();
this.context = read();
this.resolveOptions = read();
this.factoryMeta = read();
this.useSourceMap = read();
this.warnings = read();
this.errors = read();
this.buildMeta = read();
this.buildInfo = read();
super.deserialize(context);
}
}
makeSerializable(Module, "webpack/lib/Module");
Object.defineProperty(Module.prototype, "hasEqualsChunks", {
get() {
throw new Error(

lib/ModuleGraph.js

@ -24,7 +24,7 @@ class ModuleGraphModule {
/** @type {Set<ModuleGraphConnection>} */
this.outgoingConnections = new Set();
/** @type {Module | null} */
this.issuer = null;
this.issuer = undefined;
/** @type {(string | OptimizationBailoutFunction)[]} */
this.optimizationBailout = [];
/** @type {false | true | SortableSet<string> | null} */

lib/ModuleProfile.js

@ -9,8 +9,10 @@ class ModuleProfile {
constructor() {
this.startTime = Date.now();
this.factory = 0;
this.restoring = 0;
this.integration = 0;
this.building = 0;
this.storing = 0;
this.additionalFactories = 0;
this.additionalIntegration = 0;
}
@ -24,6 +26,15 @@ class ModuleProfile {
this.factory = this.factoryEndTime - this.factoryStartTime;
}
markRestoringStart() {
this.restoringStartTime = Date.now();
}
markRestoringEnd() {
this.restoringEndTime = Date.now();
this.restoring = this.restoringEndTime - this.restoringStartTime;
}
markIntegrationStart() {
this.integrationStartTime = Date.now();
}

lib/NormalModule.js

@ -23,6 +23,7 @@ const WebpackError = require("./WebpackError");
const compareLocations = require("./compareLocations");
const createHash = require("./util/createHash");
const contextify = require("./util/identifier").contextify;
const makeSerializable = require("./util/makeSerializable");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("./ChunkGraph")} ChunkGraph */
@ -155,8 +156,8 @@ class NormalModule extends Module {
* @returns {void}
*/
updateCacheModule(module) {
super.updateCacheModule(module);
const m = /** @type {NormalModule} */ (module);
this.type = m.type;
this.request = m.request;
this.userRequest = m.userRequest;
this.rawRequest = m.rawRequest;
@ -165,7 +166,6 @@ class NormalModule extends Module {
this.resource = m.resource;
this.matchResource = m.matchResource;
this.loaders = m.loaders;
this.resolveOptions = m.resolveOptions;
}
createSourceForAsset(name, content, sourceMap) {
@ -634,6 +634,57 @@ class NormalModule extends Module {
hash.update(this._buildHash);
super.updateHash(hash, chunkGraph);
}
serialize(context) {
const { write } = context;
// constructor
write(this.type);
write(this.resource);
// deserialize
write(this._source);
write(this._buildHash);
write(this.buildTimestamp);
write(this.lineToLine);
write(this.error);
write(this._cachedSources);
write(this._lastSuccessfulBuildMeta);
write(this._forceBuild);
super.serialize(context);
}
static deserialize(context) {
const { read } = context;
const obj = new NormalModule({
type: read(),
resource: read(),
// will be filled by updateCacheModule
request: null,
userRequest: null,
rawRequest: null,
loaders: null,
matchResource: null,
parser: null,
generator: null,
resolveOptions: null
});
obj.deserialize(context);
return obj;
}
deserialize(context) {
const { read } = context;
this._source = read();
this._buildHash = read();
this.buildTimestamp = read();
this.lineToLine = read();
this.error = read();
this._cachedSources = read();
this._lastSuccessfulBuildMeta = read();
this._forceBuild = read();
super.deserialize(context);
}
}
makeSerializable(NormalModule, "webpack/lib/NormalModule");
module.exports = NormalModule;

lib/Stats.js

@ -561,8 +561,10 @@ class Stats {
if (!profile) return undefined;
return {
resolving: profile.factory,
restoring: profile.restoring,
building: profile.building,
integration: profile.integration,
storing: profile.storing,
additionalResolving: profile.additionalFactories,
additionalIntegration: profile.additionalIntegration,
// TODO remove this in webpack 6
@ -1261,8 +1263,10 @@ class Stats {
if (m.profile) {
const time =
m.profile.resolving +
m.profile.restoring +
m.profile.integration +
m.profile.building;
m.profile.building +
m.profile.storing;
coloredTime(time);
colors.normal(" ");
}
@ -1271,15 +1275,21 @@ class Stats {
}
coloredTime(
module.profile.resolving +
module.profile.restoring +
module.profile.integration +
module.profile.building
module.profile.building +
module.profile.storing
);
colors.normal(" (resolving: ");
coloredTime(module.profile.resolving);
colors.normal(", restoring: ");
coloredTime(module.profile.restoring);
colors.normal(", integration: ");
coloredTime(module.profile.integration);
colors.normal(", building: ");
coloredTime(module.profile.building);
colors.normal(", storing: ");
coloredTime(module.profile.storing);
if (module.profile.additionalResolving) {
colors.normal(", additional resolving: ");
coloredTime(module.profile.additionalResolving);

lib/WebpackOptionsApply.js

@ -457,9 +457,22 @@ class WebpackOptionsApply extends OptionsApply {
new WarnCaseSensitiveModulesPlugin().apply(compiler);
if (options.cache) {
const MemoryCachePlugin = require("./cache/MemoryCachePlugin");
new MemoryCachePlugin().apply(compiler);
if (options.cache && typeof options.cache === "object") {
switch (options.cache.type) {
case "memory": {
const MemoryCachePlugin = require("./cache/MemoryCachePlugin");
new MemoryCachePlugin().apply(compiler);
break;
}
case "filesystem": {
const FileCachePlugin = require("./cache/FileCachePlugin");
new FileCachePlugin(options.cache).apply(compiler);
break;
}
default:
// @ts-ignore never is expected here
throw new Error(`Unknown cache type ${options.cache.type}`);
}
}
compiler.hooks.afterPlugins.call(compiler);

lib/WebpackOptionsDefaulter.js

@ -39,7 +39,17 @@ class WebpackOptionsDefaulter extends OptionsDefaulter {
"make",
options => (options.mode === "development" ? "eval" : false)
);
this.set("cache", "make", options => options.mode === "development");
this.set("cache", "call", (value, options) => {
if (value === undefined) {
value = options.mode === "development";
}
if (value === true) {
return {
type: "memory"
};
}
return value;
});
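// Editor's note (not part of the commit): the net effect of this defaulting is
//   cache: undefined -> { type: "memory" } in development mode, false otherwise
//   cache: true      -> { type: "memory" }
//   cache: false     -> false (WebpackOptionsApply applies no cache plugin)
//   cache: { ... }   -> passed through unchanged (e.g. { type: "filesystem", ... })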
this.set("context", process.cwd());
this.set("target", "web");

lib/cache/FileCachePlugin.js (new file, vendored)

@ -0,0 +1,197 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const mkdirp = require("mkdirp");
const path = require("path");
const createHash = require("../util/createHash");
const serializer = require("../util/serializer");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../../declarations/WebpackOptions").FileCacheOptions} FileCacheOptions */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
const memorize = fn => {
let result = undefined;
return () => {
if (result === undefined) result = fn();
return result;
};
};
const memoryCache = new Map();
class FileCachePlugin {
/**
* @param {FileCacheOptions} options options
*/
constructor(options) {
this.options = options;
}
/**
* @param {Compiler} compiler Webpack compiler
* @returns {void}
*/
apply(compiler) {
const cacheDirectory = path.resolve(
this.options.cacheDirectory || "node_modules/.cache/webpack/",
this.options.name || compiler.name || "default"
);
const hashAlgorithm = this.options.hashAlgorithm || "md4";
const version = this.options.version || "";
const warn = this.options.warn || false;
const log = this.options.log || false;
const store = this.options.store || "idle";
let pendingPromiseFactories = new Map();
const toHash = str => {
const hash = createHash(hashAlgorithm);
hash.update(str);
const digest = hash.digest("hex");
return `${digest.slice(0, 2)}/${digest.slice(2)}`;
};
compiler.hooks.beforeCompile.tapAsync(
"FileCachePlugin",
(params, callback) => {
mkdirp(cacheDirectory, callback);
}
);
compiler.cache.hooks.store.tapPromise(
"FileCachePlugin",
(identifier, etag, data) => {
const entry = { identifier, data: () => data, etag, version };
const filename = path.join(
cacheDirectory,
toHash(identifier) + ".data"
);
memoryCache.set(filename, entry);
const promiseFactory = () =>
serializer
.serializeToFile(entry, filename)
.then(() => {
if (log) {
console.warn(`Cached ${identifier} to ${filename}.`);
}
})
.catch(err => {
if (warn) {
console.warn(`Caching failed for ${identifier}: ${err.stack}`);
}
});
if (store === "instant") {
return promiseFactory();
} else if (store === "idle") {
pendingPromiseFactories.set(filename, promiseFactory);
return Promise.resolve();
} else if (store === "background") {
const promise = promiseFactory();
pendingPromiseFactories.set(filename, () => promise);
return Promise.resolve();
}
}
);
compiler.cache.hooks.get.tapPromise(
"FileCachePlugin",
(identifier, etag) => {
const filename = path.join(
cacheDirectory,
toHash(identifier) + ".data"
);
const memory = memoryCache.get(filename);
if (memory !== undefined) {
return Promise.resolve(
memory.etag === etag && memory.version === version
? memory.data()
: undefined
);
}
return serializer.deserializeFromFile(filename).then(
cacheEntry => {
if (cacheEntry === undefined) return;
cacheEntry = {
identifier: cacheEntry.identifier,
etag: cacheEntry.etag,
version: cacheEntry.version,
data: memorize(cacheEntry.data)
};
memoryCache.set(filename, cacheEntry);
if (cacheEntry.identifier !== identifier) {
if (log) {
console.warn(
`Restored ${identifier} from ${filename}, but identifier doesn't match.`
);
}
return;
}
if (cacheEntry.etag !== etag) {
if (log) {
console.warn(
`Restored ${etag} from ${filename}, but etag doesn't match.`
);
}
return;
}
if (cacheEntry.version !== version) {
if (log) {
console.warn(
`Restored ${version} from ${filename}, but version doesn't match.`
);
}
return;
}
if (log) {
console.warn(`Restored ${identifier} from ${filename}.`);
}
return cacheEntry.data();
},
err => {
if (warn && err && err.code !== "ENOENT") {
console.warn(`Restoring failed for ${identifier}: ${err}`);
}
}
);
}
);
compiler.cache.hooks.shutdown.tapPromise("FileCachePlugin", () => {
isIdle = false;
const promises = Array.from(pendingPromiseFactories.values()).map(fn =>
fn()
);
pendingPromiseFactories.clear();
if (currentIdlePromise !== undefined) promises.push(currentIdlePromise);
return Promise.all(promises);
});
let currentIdlePromise;
let isIdle = false;
const processIdleTasks = () => {
if (isIdle && pendingPromiseFactories.size > 0) {
const promises = [];
const maxTime = Date.now() + 100;
let maxCount = 100;
for (const [filename, factory] of pendingPromiseFactories) {
pendingPromiseFactories.delete(filename);
promises.push(factory());
if (maxCount-- <= 0 || Date.now() > maxTime) break;
}
currentIdlePromise = Promise.all(promises).then(() => {
currentIdlePromise = undefined;
});
currentIdlePromise.then(processIdleTasks);
}
};
compiler.cache.hooks.beginIdle.tap("FileCachePlugin", () => {
isIdle = true;
Promise.resolve().then(processIdleTasks);
});
compiler.cache.hooks.endIdle.tap("FileCachePlugin", () => {
isIdle = false;
});
}
}
module.exports = FileCachePlugin;

lib/dependencies/AMDDefineDependency.js

@ -5,6 +5,7 @@
"use strict";
const makeSerializable = require("../util/makeSerializable");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -25,8 +26,35 @@ class AMDDefineDependency extends NullDependency {
get type() {
return "amd define";
}
serialize(context) {
const { write } = context;
write(this.range);
write(this.arrayRange);
write(this.functionRange);
write(this.objectRange);
write(this.namedModule);
write(this.localModule);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.range = read();
this.arrayRange = read();
this.functionRange = read();
this.objectRange = read();
this.namedModule = read();
this.localModule = read();
super.deserialize(context);
}
}
makeSerializable(
AMDDefineDependency,
"webpack/lib/dependencies/AMDDefineDependency"
);
AMDDefineDependency.Template = class AMDDefineDependencyTemplate extends NullDependency.Template {
get definitions() {
return {

lib/dependencies/CommonJsRequireDependency.js

@ -5,6 +5,7 @@
"use strict";
const makeSerializable = require("../util/makeSerializable");
const ModuleDependency = require("./ModuleDependency");
const ModuleDependencyTemplateAsId = require("./ModuleDependencyTemplateAsId");
@ -21,4 +22,9 @@ class CommonJsRequireDependency extends ModuleDependency {
CommonJsRequireDependency.Template = ModuleDependencyTemplateAsId;
makeSerializable(
CommonJsRequireDependency,
"webpack/lib/dependencies/CommonJsRequireDependency"
);
module.exports = CommonJsRequireDependency;

lib/dependencies/ConstDependency.js

@ -5,6 +5,7 @@
"use strict";
const makeSerializable = require("../util/makeSerializable");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -32,8 +33,26 @@ class ConstDependency extends NullDependency {
hash.update(this.range + "");
hash.update(this.expression + "");
}
serialize(context) {
const { write } = context;
write(this.expression);
write(this.range);
write(this.requireWebpackRequire);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.expression = read();
this.range = read();
this.requireWebpackRequire = read();
super.deserialize(context);
}
}
makeSerializable(ConstDependency, "webpack/lib/dependencies/ConstDependency");
ConstDependency.Template = class ConstDependencyTemplate extends NullDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/HarmonyCompatibilityDependency.js

@ -6,6 +6,7 @@
"use strict";
const InitFragment = require("../InitFragment");
const makeSerializable = require("../util/makeSerializable");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -19,6 +20,11 @@ class HarmonyCompatibilityDependency extends NullDependency {
}
}
makeSerializable(
HarmonyCompatibilityDependency,
"webpack/lib/dependencies/HarmonyCompatibilityDependency"
);
HarmonyCompatibilityDependency.Template = class HarmonyExportDependencyTemplate extends NullDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/HarmonyExportExpressionDependency.js

@ -5,6 +5,7 @@
"use strict";
const makeSerializable = require("../util/makeSerializable");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -36,8 +37,29 @@ class HarmonyExportExpressionDependency extends NullDependency {
dependencies: undefined
};
}
serialize(context) {
const { write } = context;
write(this.range);
write(this.rangeStatement);
write(this.prefix);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.range = read();
this.rangeStatement = read();
this.prefix = read();
super.deserialize(context);
}
}
makeSerializable(
HarmonyExportExpressionDependency,
"webpack/lib/dependencies/HarmonyExportExpressionDependency"
);
HarmonyExportExpressionDependency.Template = class HarmonyExportDependencyTemplate extends NullDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/HarmonyExportHeaderDependency.js

@ -5,6 +5,7 @@
"use strict";
const makeSerializable = require("../util/makeSerializable");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -21,8 +22,27 @@ class HarmonyExportHeaderDependency extends NullDependency {
get type() {
return "harmony export header";
}
serialize(context) {
const { write } = context;
write(this.range);
write(this.rangeStatement);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.range = read();
this.rangeStatement = read();
super.deserialize(context);
}
}
makeSerializable(
HarmonyExportHeaderDependency,
"webpack/lib/dependencies/HarmonyExportHeaderDependency"
);
HarmonyExportHeaderDependency.Template = class HarmonyExportDependencyTemplate extends NullDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/HarmonyExportSpecifierDependency.js

@ -6,6 +6,7 @@
"use strict";
const InitFragment = require("../InitFragment");
const makeSerializable = require("../util/makeSerializable");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -36,8 +37,27 @@ class HarmonyExportSpecifierDependency extends NullDependency {
dependencies: undefined
};
}
serialize(context) {
const { write } = context;
write(this.id);
write(this.name);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.id = read();
this.name = read();
super.deserialize(context);
}
}
makeSerializable(
HarmonyExportSpecifierDependency,
"webpack/lib/dependencies/HarmonyExportSpecifierDependency"
);
HarmonyExportSpecifierDependency.Template = class HarmonyExportSpecifierDependencyTemplate extends NullDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/HarmonyImportDependency.js

@ -98,6 +98,18 @@ class HarmonyImportDependency extends ModuleDependency {
""
);
}
serialize(context) {
const { write } = context;
write(this.sourceOrder);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.sourceOrder = read();
super.deserialize(context);
}
}
module.exports = HarmonyImportDependency;

lib/dependencies/HarmonyImportSideEffectDependency.js

@ -5,6 +5,7 @@
"use strict";
const makeSerializable = require("../util/makeSerializable");
const HarmonyImportDependency = require("./HarmonyImportDependency");
/** @typedef {import("../Dependency")} Dependency */
@ -39,6 +40,11 @@ class HarmonyImportSideEffectDependency extends HarmonyImportDependency {
}
}
makeSerializable(
HarmonyImportSideEffectDependency,
"webpack/lib/dependencies/HarmonyImportSideEffectDependency"
);
HarmonyImportSideEffectDependency.Template = class HarmonyImportSideEffectDependencyTemplate extends HarmonyImportDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/HarmonyImportSpecifierDependency.js

@ -6,6 +6,7 @@
"use strict";
const HarmonyLinkingError = require("../HarmonyLinkingError");
const makeSerializable = require("../util/makeSerializable");
const DependencyReference = require("./DependencyReference");
const HarmonyImportDependency = require("./HarmonyImportDependency");
@ -183,8 +184,41 @@ class HarmonyImportSpecifierDependency extends HarmonyImportDependency {
hash.update(stringifyUsedExports);
}
}
serialize(context) {
const { write } = context;
write(this.id);
write(this.name);
write(this.range);
write(this.strictExportPresence);
write(this.namespaceObjectAsContext);
write(this.callArgs);
write(this.call);
write(this.directImport);
write(this.shorthand);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.id = read();
this.name = read();
this.range = read();
this.strictExportPresence = read();
this.namespaceObjectAsContext = read();
this.callArgs = read();
this.call = read();
this.directImport = read();
this.shorthand = read();
super.deserialize(context);
}
}
makeSerializable(
HarmonyImportSpecifierDependency,
"webpack/lib/dependencies/HarmonyImportSpecifierDependency"
);
HarmonyImportSpecifierDependency.Template = class HarmonyImportSpecifierDependencyTemplate extends HarmonyImportDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/ModuleDecoratorDependency.js

@ -6,6 +6,7 @@
"use strict";
const InitFragment = require("../InitFragment");
const makeSerializable = require("../util/makeSerializable");
const ModuleDependency = require("./ModuleDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -30,6 +31,11 @@ class ModuleDecoratorDependency extends ModuleDependency {
}
}
makeSerializable(
ModuleDecoratorDependency,
"webpack/lib/dependencies/ModuleDecoratorDependency"
);
ModuleDecoratorDependency.Template = class ModuleDecoratorDependencyTemplate extends ModuleDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/ModuleDependency.js

@ -25,6 +25,22 @@ class ModuleDependency extends Dependency {
getResourceIdentifier() {
return `module${this.request}`;
}
serialize(context) {
const { write } = context;
write(this.request);
write(this.userRequest);
write(this.range);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.request = read();
this.userRequest = read();
this.range = read();
super.deserialize(context);
}
}
ModuleDependency.Template = DependencyTemplate;

lib/dependencies/NullDependency.js

@ -27,6 +27,14 @@ class NullDependency extends Dependency {
* @returns {void}
*/
updateHash(hash, chunkGraph) {}
serialize(context) {
// do nothing
}
deserialize(context) {
// do nothing
}
}
NullDependency.Template = class NullDependencyTemplate extends DependencyTemplate {

lib/dependencies/ProvidedDependency.js

@ -6,6 +6,7 @@
"use strict";
const InitFragment = require("../InitFragment");
const makeSerializable = require("../util/makeSerializable");
const ModuleDependency = require("./ModuleDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -45,8 +46,29 @@ class ProvidedDependency extends ModuleDependency {
hash.update(this.identifier);
hash.update(this.path ? this.path.join(",") : "null");
}
serialize(context) {
const { write } = context;
write(this.identifier);
write(this.path);
write(this.range);
super.serialize(context);
}
deserialize(context) {
const { read } = context;
this.identifier = read();
this.path = read();
this.range = read();
super.deserialize(context);
}
}
makeSerializable(
ProvidedDependency,
"webpack/lib/dependencies/ProvidedDependency"
);
class ProvidedDependencyTemplate extends ModuleDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/dependencies/RequireHeaderDependency.js

@ -5,6 +5,7 @@
"use strict";
const makeSerializable = require("../util/makeSerializable");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
@ -17,8 +18,25 @@ class RequireHeaderDependency extends NullDependency {
if (!Array.isArray(range)) throw new Error("range must be valid");
this.range = range;
}
serialize(context) {
const { write } = context;
write(this.range);
super.serialize(context);
}
static deserialize(context) {
const obj = new RequireHeaderDependency(context.read());
obj.deserialize(context);
return obj;
}
}
makeSerializable(
RequireHeaderDependency,
"webpack/lib/dependencies/RequireHeaderDependency"
);
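// Editor's note (not part of the commit): a static deserialize() is used here
// (and in NormalModule above) because the constructor requires arguments;
// they are read from the stream before the instance is created, then the
// regular instance deserialize() fills in the rest.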
RequireHeaderDependency.Template = class RequireHeaderDependencyTemplate extends NullDependency.Template {
/**
* @param {Dependency} dependency the dependency for which the template should be applied

lib/serialization/BinaryMiddleware.js (new file)

@ -0,0 +1,341 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const SerializerMiddleware = require("./SerializerMiddleware");
/*
Format:
File -> Section*
Section -> NullsSection |
F64NumbersSection |
I32NumbersSection |
I8NumbersSection |
ShortStringSection |
StringSection |
BufferSection |
BooleanSection |
NopSection
NullsSection -> NullsSectionHeaderByte
F64NumbersSection -> F64NumbersSectionHeaderByte f64*
I32NumbersSection -> I32NumbersSectionHeaderByte i32*
I8NumbersSection -> I8NumbersSectionHeaderByte i8*
ShortStringSection -> ShortStringSectionHeaderByte utf8-byte*
StringSection -> StringSectionHeaderByte i32:length utf8-byte*
BufferSection -> BufferSectionHeaderByte i32:length byte*
BooleanSection -> TrueHeaderByte | FalseHeaderByte
NopSection --> NopSectionHeaderByte
ShortStringSectionHeaderByte -> 0b1nnn_nnnn (n:length)
F64NumbersSectionHeaderByte -> 0b001n_nnnn (n:length)
I32NumbersSectionHeaderByte -> 0b010n_nnnn (n:length)
I8NumbersSectionHeaderByte -> 0b011n_nnnn (n:length)
NullsSectionHeaderByte -> 0b0001_nnnn (n:length)
StringSectionHeaderByte -> 0b0000_1110
BufferSectionHeaderByte -> 0b0000_1111
NopSectionHeaderByte -> 0b0000_1011
FalseHeaderByte -> 0b0000_1100
TrueHeaderByte -> 0b0000_1101
RawNumber -> n (n <= 10)
*/
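// Editor's worked example (not part of the commit), using the header constants
// defined below:
//   serialize(["abc", 7, 1000, null, null]) emits the bytes
//   0x83 0x61 0x62 0x63        ShortStringSection, length 3, "abc"
//   0x07                       RawNumber (integers 0..10 are written as-is)
//   0x40 0xe8 0x03 0x00 0x00   I32NumbersSection with one i32 (1000, little-endian)
//   0x11                       NullsSection covering the two consecutive nulls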
const NOP_HEADER = 0x0b;
const TRUE_HEADER = 0x0c;
const FALSE_HEADER = 0x0d;
const STRING_HEADER = 0x0e;
const BUFFER_HEADER = 0x0f;
const NULLS_HEADER_MASK = 0xf0;
const NULLS_HEADER = 0x10;
const NUMBERS_HEADER_MASK = 0xe0;
const I8_HEADER = 0x60;
const I32_HEADER = 0x40;
const F64_HEADER = 0x20;
const SHORT_STRING_HEADER = 0x80;
const identifyNumber = n => {
if (n === (n | 0)) {
if (n <= 127 && n >= -128) return 0;
if (n <= 2147483647 && n >= -2147483648) return 1;
}
return 2;
};
class BinaryMiddleware extends SerializerMiddleware {
_handleFunctionSerialization(fn, context) {
return () => {
const r = fn();
if (r instanceof Promise)
return r.then(data => this.serialize(data, context));
return this.serialize(r, context);
};
}
_handleFunctionDeserialization(fn, context) {
return () => {
const r = fn();
if (r instanceof Promise)
return r.then(data => this.deserialize(data, context));
return this.deserialize(r, context);
};
}
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} serialized data
*/
serialize(data, context) {
/** @type {Buffer} */
let currentBuffer = null;
let currentPosition = 0;
const buffers = [];
const allocate = (bytesNeeded, exact = false) => {
if (currentBuffer !== null) {
if (currentBuffer.length - currentPosition >= bytesNeeded) return;
flush();
}
currentBuffer = Buffer.alloc(
exact ? bytesNeeded : Math.max(bytesNeeded, 1024)
);
};
const flush = () => {
if (currentBuffer !== null) {
buffers.push(currentBuffer.slice(0, currentPosition));
currentBuffer = null;
currentPosition = 0;
}
};
const writeU8 = byte => {
currentBuffer.writeUInt8(byte, currentPosition++);
};
const writeU32 = ui32 => {
currentBuffer.writeUInt32LE(ui32, currentPosition);
currentPosition += 4;
};
for (let i = 0; i < data.length; i++) {
const thing = data[i];
switch (typeof thing) {
case "function": {
flush();
buffers.push(this._handleFunctionSerialization(thing, context));
break;
}
case "string": {
const len = Buffer.byteLength(thing);
if (len >= 128) {
allocate(len + 5);
writeU8(STRING_HEADER);
writeU32(len);
} else {
allocate(len + 1);
writeU8(SHORT_STRING_HEADER | len);
}
currentBuffer.write(thing, currentPosition);
currentPosition += len;
break;
}
case "number": {
const type = identifyNumber(thing);
if (type === 0 && thing >= 0 && thing <= 10) {
// shortcut for very small numbers
allocate(1);
writeU8(thing);
break;
}
let n;
for (n = 1; n < 32 && i + n < data.length; n++) {
const item = data[i + n];
if (typeof item !== "number") break;
if (identifyNumber(item) !== type) break;
}
switch (type) {
case 0:
allocate(1 + n);
writeU8(I8_HEADER | (n - 1));
while (n > 0) {
currentBuffer.writeInt8(data[i], currentPosition);
currentPosition++;
n--;
i++;
}
break;
case 1:
allocate(1 + 4 * n);
writeU8(I32_HEADER | (n - 1));
while (n > 0) {
currentBuffer.writeInt32LE(data[i], currentPosition);
currentPosition += 4;
n--;
i++;
}
break;
case 2:
allocate(1 + 8 * n);
writeU8(F64_HEADER | (n - 1));
while (n > 0) {
currentBuffer.writeDoubleLE(data[i], currentPosition);
currentPosition += 8;
n--;
i++;
}
break;
}
i--;
break;
}
case "boolean":
allocate(1);
writeU8(thing === true ? TRUE_HEADER : FALSE_HEADER);
break;
case "object": {
if (thing === null) {
let n;
for (n = 1; n < 16 && i + n < data.length; n++) {
const item = data[i + n];
if (item !== null) break;
}
allocate(1);
writeU8(NULLS_HEADER | (n - 1));
i += n - 1;
} else if (Buffer.isBuffer(thing)) {
allocate(5, true);
writeU8(BUFFER_HEADER);
writeU32(thing.length);
flush();
buffers.push(thing);
}
break;
}
}
}
flush();
return buffers;
}
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} deserialized data
*/
deserialize(data, context) {
let currentDataItem = 0;
let currentBuffer = data[0];
let currentPosition = 0;
const checkOverflow = () => {
if (currentPosition >= currentBuffer.length) {
currentPosition = 0;
currentDataItem++;
currentBuffer =
currentDataItem < data.length ? data[currentDataItem] : null;
}
};
const read = n => {
if (currentBuffer === null) throw new Error("Unexpected end of stream");
if (!Buffer.isBuffer(currentBuffer))
throw new Error("Unexpected lazy element in stream");
const rem = currentBuffer.length - currentPosition;
if (rem < n) {
return Buffer.concat([read(rem), read(n - rem)]);
}
const res = currentBuffer.slice(currentPosition, currentPosition + n);
currentPosition += n;
checkOverflow();
return res;
};
const readU8 = () => {
if (currentBuffer === null) throw new Error("Unexpected end of stream");
if (!Buffer.isBuffer(currentBuffer))
throw new Error("Unexpected lazy element in stream");
const byte = currentBuffer.readUInt8(currentPosition);
currentPosition++;
checkOverflow();
return byte;
};
const readU32 = () => {
return read(4).readUInt32LE(0);
};
const result = [];
while (currentBuffer !== null) {
if (typeof currentBuffer === "function") {
result.push(
this._handleFunctionDeserialization(currentBuffer, context)
);
currentDataItem++;
currentBuffer =
currentDataItem < data.length ? data[currentDataItem] : null;
continue;
}
const header = readU8();
switch (header) {
case NOP_HEADER:
break;
case BUFFER_HEADER: {
const len = readU32();
result.push(read(len));
break;
}
case TRUE_HEADER:
result.push(true);
break;
case FALSE_HEADER:
result.push(false);
break;
case STRING_HEADER: {
const len = readU32();
const buf = read(len);
result.push(buf.toString());
break;
}
default:
if (header <= 10) {
result.push(header);
} else if ((header & SHORT_STRING_HEADER) === SHORT_STRING_HEADER) {
const len = header & 0x7f;
const buf = read(len);
result.push(buf.toString());
} else if ((header & NUMBERS_HEADER_MASK) === F64_HEADER) {
const len = header & 0x1f;
const buf = read(8 * len + 8);
for (let i = 0; i <= len; i++) {
result.push(buf.readDoubleLE(i * 8));
}
} else if ((header & NUMBERS_HEADER_MASK) === I32_HEADER) {
const len = header & 0x1f;
const buf = read(4 * len + 4);
for (let i = 0; i <= len; i++) {
result.push(buf.readInt32LE(i * 4));
}
} else if ((header & NUMBERS_HEADER_MASK) === I8_HEADER) {
const len = header & 0x1f;
const buf = read(len + 1);
for (let i = 0; i <= len; i++) {
result.push(buf.readInt8(i));
}
} else if ((header & NULLS_HEADER_MASK) === NULLS_HEADER) {
const len = header & 0x0f;
for (let i = 0; i <= len; i++) {
result.push(null);
}
} else {
throw new Error(`Unexpected header byte 0x${header.toString(16)}`);
}
break;
}
}
return result;
}
}
module.exports = BinaryMiddleware;

lib/serialization/FileMiddleware.js (new file)

@ -0,0 +1,208 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const fs = require("fs");
const mkdirp = require("mkdirp");
const path = require("path");
const SerializerMiddleware = require("./SerializerMiddleware");
class Section {
constructor(items) {
this.items = items;
this.parts = undefined;
this.length = NaN;
this.offset = NaN;
}
resolve() {
let hasPromise = false;
let lastPart = undefined;
const parts = [];
let length = 0;
for (const item of this.items) {
if (typeof item === "function") {
const r = item();
if (r instanceof Promise) {
parts.push(r.then(items => new Section(items).resolve()));
hasPromise = true;
} else {
parts.push(new Section(r).resolve());
}
length += 12; // 0, offset, size
lastPart = undefined;
} else if (lastPart) {
lastPart.push(item);
length += item.length;
} else {
length += 4; // size
length += item.length;
lastPart = [item];
parts.push(lastPart);
}
}
this.length = length;
if (hasPromise) {
return Promise.all(parts).then(parts => {
this.parts = parts;
return this;
});
} else {
this.parts = parts;
return this;
}
}
getSections() {
return this.parts.filter(p => p instanceof Section);
}
emit(out) {
for (const part of this.parts) {
if (part instanceof Section) {
const pointerBuf = Buffer.alloc(12);
pointerBuf.writeUInt32LE(0, 0);
pointerBuf.writeUInt32LE(part.offset, 4);
pointerBuf.writeUInt32LE(part.length, 8);
out.push(pointerBuf);
} else {
const sizeBuf = Buffer.alloc(4);
out.push(sizeBuf);
let len = 0;
for (const buf of part) {
len += buf.length;
out.push(buf);
}
sizeBuf.writeUInt32LE(len, 0);
}
}
}
}
const createPointer = (filename, offset, size) => {
return () => {
return new Promise((resolve, reject) => {
// TODO handle concurrent access to file
fs.open(filename, "r", (err, file) => {
if (err) return reject(err);
readSection(filename, file, offset, size, (readErr, parts) => {
fs.close(file, err => {
if (err) return reject(err);
if (readErr) return reject(readErr);
resolve(parts);
});
});
});
});
};
};
const readSection = (filename, file, offset, size, callback) => {
const buffer = Buffer.alloc(size);
fs.read(file, buffer, 0, size, offset, err => {
if (err) return callback(err);
const result = [];
let pos = 0;
while (pos < buffer.length) {
const len = buffer.readUInt32LE(pos);
pos += 4;
if (len === 0) {
const pOffset = buffer.readUInt32LE(pos);
pos += 4;
const pSize = buffer.readUInt32LE(pos);
pos += 4;
result.push(createPointer(filename, pOffset, pSize));
} else {
const buf = buffer.slice(pos, pos + len);
pos += len;
result.push(buf);
}
}
callback(null, result);
});
};
class FileMiddleware extends SerializerMiddleware {
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} serialized data
*/
serialize(data, { filename }) {
const root = new Section(data);
const r = root.resolve();
return Promise.resolve(r).then(() => {
// calc positions in file
let currentOffset = 4;
const processOffsets = section => {
section.offset = currentOffset;
currentOffset += section.length;
for (const child of section.getSections()) {
processOffsets(child);
}
};
processOffsets(root);
// get buffers to write
const sizeBuf = Buffer.alloc(4);
sizeBuf.writeUInt32LE(root.length, 0);
const buffers = [sizeBuf];
const emit = (section, out) => {
section.emit(out);
for (const child of section.getSections()) {
emit(child, out);
}
};
emit(root, buffers);
// write to file
return new Promise((resolve, reject) => {
mkdirp(path.dirname(filename), err => {
if (err) return reject(err);
fs.writeFile(filename, Buffer.concat(buffers), err => {
if (err) return reject(err);
resolve();
});
});
});
});
}
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} deserialized data
*/
deserialize(data, { filename }) {
return new Promise((resolve, reject) => {
fs.open(filename, "r", (err, file) => {
if (err) return reject(err);
const sizeBuf = Buffer.alloc(4);
fs.read(file, sizeBuf, 0, 4, 0, err => {
if (err) return reject(err);
const rootSize = sizeBuf.readUInt32LE(0);
readSection(filename, file, 4, rootSize, (readErr, parts) => {
fs.close(file, err => {
if (err) return reject(err);
if (readErr) return reject(readErr);
resolve(parts);
});
});
});
});
});
}
}
module.exports = FileMiddleware;
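// Editor's note (not part of the commit): the resulting on-disk layout, as
// produced by serialize() above, is
//   [u32 rootLength] [root section] [child section] [child section] ...
// where each section is a sequence of records that are either
//   [u32 length] [payload bytes]                 (inline data)
// or
//   [u32 0] [u32 offset] [u32 length]            (pointer to a child section,
//                                                 read lazily via createPointer)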

lib/serialization/MapObjectSerializer.js (new file)

@ -0,0 +1,25 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
class MapObjectSerializer {
serialize(obj, { write }) {
write(obj.size);
for (const [key, value] of obj) {
write(key);
write(value);
}
}
deserialize({ read }) {
let size = read();
const map = new Map();
for (let i = 0; i < size; i++) {
map.set(read(), read());
}
return map;
}
}
module.exports = MapObjectSerializer;

lib/serialization/ObjectMiddleware.js (new file)

@ -0,0 +1,304 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const MapObjectSerializer = require("./MapObjectSerializer");
const PlainObjectSerializer = require("./PlainObjectSerializer");
const SerializerMiddleware = require("./SerializerMiddleware");
const SetObjectSerializer = require("./SetObjectSerializer");
/** @typedef {new (...params: any[]) => any} Constructor */
/*
Format:
File -> Section*
Section -> ObjectSection | ReferenceSection | EscapeSection | OtherSection
ObjectSection -> ESCAPE (
null number:relativeOffset |
string:request (string|null):export
) Section:value* ESCAPE ESCAPE_END_OBJECT
ReferenceSection -> ESCAPE number:relativeOffset
EscapeSection -> ESCAPE ESCAPE_ESCAPE_VALUE (escaped value ESCAPE)
EscapeSection -> ESCAPE ESCAPE_UNDEFINED (escaped value ESCAPE)
OtherSection -> any (except ESCAPE)
Why use null as the escape value?
Multiple null values can be merged by the BinaryMiddleware, which makes it very efficient.
Technically any value can be used.
*/
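// Editor's worked example (not part of the commit):
//   serialize(["a", "a", undefined]) yields
//   [CURRENT_VERSION, "a", ESCAPE, -1, ESCAPE, ESCAPE_UNDEFINED]
// the second "a" becomes a back-reference (relative offset -1) and undefined
// is written as an escape sequence, because ESCAPE itself is null.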
/**
* @typedef {Object} ObjectSerializerContext
* @property {function(any): void} write
*/
/**
* @typedef {Object} ObjectDeserializerContext
* @property {function(): any} read
*/
/**
* @typedef {Object} ObjectSerializer
* @property {function(any, ObjectSerializerContext): void} serialize
* @property {function(ObjectDeserializerContext): any} deserialize
*/
const ESCAPE = null;
const ESCAPE_ESCAPE_VALUE = 1;
const ESCAPE_END_OBJECT = 2;
const ESCAPE_UNDEFINED = 3;
const CURRENT_VERSION = 1;
const plainObjectSerializer = new PlainObjectSerializer();
const mapObjectSerializer = new MapObjectSerializer();
const setObjectSerializer = new SetObjectSerializer();
const serializers = new Map();
const serializerInversed = new Map();
const loadedRequests = new Set();
serializers.set(Object, {
request: null,
name: null,
serializer: plainObjectSerializer
});
serializers.set(Array, {
request: null,
name: null,
serializer: plainObjectSerializer
});
serializers.set(Map, {
request: null,
name: 1,
serializer: mapObjectSerializer
});
serializers.set(Set, {
request: null,
name: 2,
serializer: setObjectSerializer
});
for (const { request, name, serializer } of serializers.values()) {
serializerInversed.set(`${request}/${name}`, serializer);
}
class ObjectMiddleware extends SerializerMiddleware {
/**
* @param {Constructor} Constructor the constructor
* @param {string} request the request which will be required when deserializing
* @param {string} name the name to make multiple serializers for the same request unique
* @param {ObjectSerializer} serializer the serializer
* @returns {void}
*/
static register(Constructor, request, name, serializer) {
const key = request + "/" + name;
if (serializers.has(Constructor)) {
throw new Error(
`ObjectMiddleware.register: serializer for ${
Constructor.name
} is already registered`
);
}
if (serializerInversed.has(key)) {
throw new Error(
`ObjectMiddleware.register: serializer for ${key} is already registered`
);
}
serializers.set(Constructor, {
request,
name,
serializer
});
serializerInversed.set(key, serializer);
}
static getSerializerFor(object) {
const c = object.constructor;
const config = serializers.get(c);
if (!config) throw new Error(`No serializer registered for ${c.name}`);
return config;
}
static getDeserializerFor(request, name) {
const key = request + "/" + name;
const serializer = serializerInversed.get(key);
if (serializer === undefined) {
throw new Error(`No deserializer registered for ${key}`);
}
return serializer;
}
_handleFunctionSerialization(fn, context) {
return () => {
const r = fn();
if (r instanceof Promise)
return r.then(data => this.serialize([data], context));
return this.serialize([r], context);
};
}
_handleFunctionDeserialization(fn, context) {
return () => {
const r = fn();
if (r instanceof Promise)
return r.then(data => this.deserialize(data, context)[0]);
return this.deserialize(r, context)[0];
};
}
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} serialized data
*/
serialize(data, context) {
/** @type {any[]} */
const result = [CURRENT_VERSION];
let currentPos = 0;
const referenceable = new Map();
const addReferenceable = item => {
referenceable.set(item, currentPos++);
};
let currentPosTypeLookup = 0;
const objectTypeLookup = new Map();
const process = item => {
const ref = referenceable.get(item);
if (ref !== undefined) {
result.push(ESCAPE, ref - currentPos);
return;
}
if (typeof item === "object" && item !== null) {
const { request, name, serializer } = ObjectMiddleware.getSerializerFor(
item
);
const key = `${request}/${name}`;
const lastIndex = objectTypeLookup.get(key);
if (lastIndex === undefined) {
objectTypeLookup.set(key, currentPosTypeLookup++);
result.push(ESCAPE, request, name);
} else {
result.push(ESCAPE, null, lastIndex - currentPosTypeLookup);
}
serializer.serialize(item, {
write(value) {
process(value);
}
});
result.push(ESCAPE, ESCAPE_END_OBJECT);
addReferenceable(item);
} else if (typeof item === "string") {
addReferenceable(item);
result.push(item);
} else if (Buffer.isBuffer(item)) {
addReferenceable(item);
result.push(item);
} else if (item === ESCAPE) {
result.push(ESCAPE, ESCAPE_ESCAPE_VALUE);
} else if (typeof item === "function") {
result.push(this._handleFunctionSerialization(item, context));
} else if (item === undefined) {
result.push(ESCAPE, ESCAPE_UNDEFINED);
} else {
result.push(item);
}
};
for (const item of data) {
process(item);
}
return result;
}
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} deserialized data
*/
deserialize(data, context) {
let currentDataPos = 0;
const read = () => {
if (currentDataPos >= data.length)
throw new Error("Unexpected end of stream");
return data[currentDataPos++];
};
if (read() !== CURRENT_VERSION)
throw new Error("Version missmatch, serializer changed");
let currentPos = 0;
const referenceable = new Map();
const addReferenceable = item => {
referenceable.set(currentPos++, item);
};
let currentPosTypeLookup = 0;
const objectTypeLookup = new Map();
const result = [];
const decodeValue = () => {
const item = read();
if (item === ESCAPE) {
const nextItem = read();
if (nextItem === ESCAPE_ESCAPE_VALUE) {
return ESCAPE;
} else if (nextItem === ESCAPE_UNDEFINED) {
return undefined;
} else if (nextItem === ESCAPE_END_OBJECT) {
throw new Error("Unexpected end of object");
} else if (typeof nextItem === "number") {
// relative reference
return referenceable.get(currentPos + nextItem);
} else {
let request = nextItem;
let name = read();
let serializer;
if (typeof name === "number" && name < 0) {
serializer = objectTypeLookup.get(currentPosTypeLookup + name);
} else {
if (request && !loadedRequests.has(request)) {
require(request);
loadedRequests.add(request);
}
serializer = ObjectMiddleware.getDeserializerFor(request, name);
objectTypeLookup.set(currentPosTypeLookup++, serializer);
}
const item = serializer.deserialize({
read() {
const item = decodeValue();
return item;
}
});
const end1 = read();
if (end1 !== ESCAPE) {
throw new Error("Expected end of object");
}
const end2 = read();
if (end2 !== ESCAPE_END_OBJECT) {
throw new Error("Expected end of object");
}
addReferenceable(item);
return item;
}
} else if (typeof item === "string") {
addReferenceable(item);
return item;
} else if (Buffer.isBuffer(item)) {
addReferenceable(item);
return item;
} else if (typeof item === "function") {
return this._handleFunctionDeserialization(item, context);
} else {
return item;
}
};
while (currentDataPos < data.length) {
result.push(decodeValue());
}
return result;
}
}
module.exports = ObjectMiddleware;

lib/serialization/PlainObjectSerializer.js (new file)

@ -0,0 +1,48 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
class PlainObjectSerializer {
serialize(obj, { write }) {
if (Array.isArray(obj)) {
write(obj.length);
for (const item of obj) {
write(item);
}
} else {
const keys = Object.keys(obj);
for (const key of keys) {
write(key);
}
write(null);
for (const key of keys) {
write(obj[key]);
}
}
}
deserialize({ read }) {
let key = read();
if (typeof key === "number") {
const array = [];
for (let i = 0; i < key; i++) {
array.push(read());
}
return array;
} else {
const obj = {};
const keys = [];
while (key !== null) {
keys.push(key);
key = read();
}
for (const key of keys) {
obj[key] = read();
}
return obj;
}
}
}
module.exports = PlainObjectSerializer;
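// Editor's worked example (not part of the commit):
//   { a: 1, b: 2 } is written as the value stream  "a", "b", null, 1, 2
//   ["x", "y"]     is written as                   2, "x", "y"
// deserialize() tells the two apart by whether the first value read is a
// number (array length) or a string (first object key).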

lib/serialization/Serializer.js (new file)

@ -0,0 +1,57 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
class Serializer {
constructor(middlewares, options = {}) {
this.middlewares = middlewares;
this.options = options;
}
serializeToFile(obj, filename) {
const context = {
filename
};
return new Promise((resolve, reject) =>
resolve(
this.middlewares.reduce((last, middleware) => {
if (last instanceof Promise) {
return last.then(data => middleware.serialize(data, context));
} else {
try {
return middleware.serialize(last, context);
} catch (err) {
return Promise.resolve().then(() => {
throw err;
});
}
}
}, this.options.singleItem ? [obj] : obj)
)
);
}
deserializeFromFile(filename) {
const context = {
filename
};
return Promise.resolve()
.then(() =>
this.middlewares
.slice()
.reverse()
.reduce((last, middleware) => {
if (last instanceof Promise)
return last.then(data => middleware.deserialize(data, context));
else return middleware.deserialize(last, context);
}, [])
)
.then(array => {
return this.options.singleItem ? array[0] : array;
});
}
}
module.exports = Serializer;

lib/serialization/SerializerMiddleware.js (new file)

@ -0,0 +1,31 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
class SerializerMiddleware {
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} serialized data
*/
serialize(data, context) {
throw new Error(
"Serializer.serialize is abstract and need to be overwritten"
);
}
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} deserialized data
*/
deserialize(data, context) {
throw new Error(
"Serializer.deserialize is abstract and need to be overwritten"
);
}
}
module.exports = SerializerMiddleware;

lib/serialization/SetObjectSerializer.js (new file)

@ -0,0 +1,24 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
class SetObjectSerializer {
serialize(obj, { write }) {
write(obj.size);
for (const value of obj) {
write(value);
}
}
deserialize({ read }) {
let size = read();
const set = new Set();
for (let i = 0; i < size; i++) {
set.add(read());
}
return set;
}
}
module.exports = SetObjectSerializer;

lib/serialization/TextMiddleware.js (new file)

@ -0,0 +1,29 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const SerializerMiddleware = require("./SerializerMiddleware");
class TextMiddleware extends SerializerMiddleware {
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} serialized data
*/
serialize(data, context) {
return [Buffer.from(JSON.stringify(data))];
}
/**
* @param {any[]} data data items
* @param {TODO} context TODO
* @returns {any[]|Promise<any[]>} deserialized data
*/
deserialize(data, context) {
return JSON.parse(Buffer.concat(data).toString());
}
}
module.exports = TextMiddleware;

lib/util/makeSerializable.js (new file)

@ -0,0 +1,69 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const ObjectMiddleware = require("../serialization/ObjectMiddleware");
const createHash = require("./createHash");
const getPrototypeChain = C => {
const chain = [];
let current = C.prototype;
while (current !== Object.prototype) {
chain.push(current);
current = Object.getPrototypeOf(current);
}
return chain;
};
class ClassSerializer {
constructor(Constructor) {
this.Constructor = Constructor;
this.hash = null;
}
_createHash() {
const hash = createHash("md4");
const prototypeChain = getPrototypeChain(this.Constructor);
if (typeof this.Constructor.deserialize === "function")
hash.update(this.Constructor.deserialize.toString());
for (const p of prototypeChain) {
if (typeof p.serialize === "function") {
hash.update(p.serialize.toString());
}
if (typeof p.deserialize === "function") {
hash.update(p.deserialize.toString());
}
}
this.hash = hash.digest("base64");
}
serialize(obj, context) {
if (!this.hash) this._createHash();
context.write(this.hash);
obj.serialize(context);
}
deserialize(context) {
if (!this.hash) this._createHash();
const hash = context.read();
if (this.hash !== hash)
throw new Error(`Version mismatch for ${this.Constructor.name}`);
if (typeof this.Constructor.deserialize === "function") {
return this.Constructor.deserialize(context);
}
const obj = new this.Constructor();
obj.deserialize(context);
return obj;
}
}
module.exports = (Constructor, request, name = null) => {
ObjectMiddleware.register(
Constructor,
request,
name,
new ClassSerializer(Constructor)
);
};
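To illustrate the pattern that the many `makeSerializable(...)` calls in this commit rely on, here is a hedged sketch of a class opting into serialization; the class name and request path are hypothetical, only the shape mirrors the diff:

``` javascript
const makeSerializable = require("./makeSerializable");

class ExampleThing {
	constructor(name) {
		this.name = name;
	}
	// called by ClassSerializer.serialize() with a { write } context
	serialize({ write }) {
		write(this.name);
	}
	// called on a blank instance created by ClassSerializer.deserialize()
	deserialize({ read }) {
		this.name = read();
	}
}

// the request string must be resolvable by require() during deserialization
makeSerializable(ExampleThing, "webpack/lib/util/ExampleThing");

module.exports = ExampleThing;
```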

lib/util/registerExternalSerializer.js (new file)

@ -0,0 +1,133 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const ObjectMiddleware = require("../serialization/ObjectMiddleware");
const SourceLocation = require("acorn").SourceLocation;
const CachedSource = require("webpack-sources").CachedSource;
const OriginalSource = require("webpack-sources").OriginalSource;
const RawSource = require("webpack-sources").RawSource;
const SourceMapSource = require("webpack-sources").SourceMapSource;
/** @typedef {import("../Dependency").RealDependencyLocation} RealDependencyLocation */
const CURRENT_MODULE = "webpack/lib/util/registerExternalSerializer";
ObjectMiddleware.register(
CachedSource,
CURRENT_MODULE,
"webpack-sources/CachedSource",
new class CachedSourceSerializer {
/**
* @param {CachedSource} source the cached source to be serialized
* @param {ObjectMiddleware.ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
const data = source.sourceAndMap({});
write(data.source);
write(JSON.stringify(data.map));
}
/**
* @param {ObjectMiddleware.ObjectDeserializerContext} context context
* @returns {CachedSource} cached source
*/
deserialize({ read }) {
const source = read();
const map = read();
return new CachedSource(new SourceMapSource(source, "unknown", map));
}
}()
);
ObjectMiddleware.register(
RawSource,
CURRENT_MODULE,
"webpack-sources/RawSource",
new class RawSourceSerializer {
/**
* @param {RawSource} source the raw source to be serialized
* @param {ObjectMiddleware.ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
const data = source.source();
write(data);
}
/**
* @param {ObjectMiddleware.ObjectDeserializerContext} context context
* @returns {RawSource} raw source
*/
deserialize({ read }) {
const source = read();
return new RawSource(source);
}
}()
);
ObjectMiddleware.register(
OriginalSource,
CURRENT_MODULE,
"webpack-sources/OriginalSource",
new class OriginalSourceSerializer {
/**
* @param {OriginalSource} source the original source to be serialized
* @param {ObjectMiddleware.ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.source());
write(source._name);
}
/**
* @param {ObjectMiddleware.ObjectDeserializerContext} context context
* @returns {OriginalSource} original source
*/
deserialize({ read }) {
return new OriginalSource(read(), read());
}
}()
);
ObjectMiddleware.register(
SourceLocation,
CURRENT_MODULE,
"acorn/SourceLocation",
new class SourceLocationSerializer {
/**
* @param {SourceLocation} loc the location to be serialized
* @param {ObjectMiddleware.ObjectSerializerContext} context context
* @returns {void}
*/
serialize(loc, { write }) {
write(loc.start.line);
write(loc.start.column);
write(loc.end.line);
write(loc.end.column);
}
/**
* @param {ObjectMiddleware.ObjectDeserializerContext} context context
* @returns {RealDependencyLocation} location
*/
deserialize({ read }) {
return {
start: {
line: read(),
column: read()
},
end: {
line: read(),
column: read()
}
};
}
}()
);

22
lib/util/serializer.js Normal file
View File

@ -0,0 +1,22 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const BinaryMiddleware = require("../serialization/BinaryMiddleware");
const FileMiddleware = require("../serialization/FileMiddleware");
const ObjectMiddleware = require("../serialization/ObjectMiddleware");
const Serializer = require("../serialization/Serializer");
const serializer = new Serializer(
[new ObjectMiddleware(), new BinaryMiddleware(), new FileMiddleware()],
{
singleItem: true
}
);
require("./registerExternalSerializer");
module.exports = serializer;
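
The middleware order suggests the pipeline: ObjectMiddleware flattens object graphs into primitive items, BinaryMiddleware packs them into buffers, and FileMiddleware moves the buffers to and from disk. Below is a heavily hedged sketch of reading a pack back through this instance; the cache file path is made up and the exact shape of the result (with `singleItem: true`) is not documented in this commit.

``` javascript
// Sketch only: read a cache pack back through the full middleware stack.
// The path is hypothetical and the shape of `data` (with singleItem: true)
// is not documented in this commit.
const serializer = require("./lib/util/serializer");

serializer
	.deserializeFromFile("/absolute/path/to/.cache/development/cache.pack")
	.then(data => {
		console.log("restored cache data:", data);
	})
	.catch(err => {
		console.error("failed to read cache pack:", err);
	});
```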

View File

@ -155,6 +155,46 @@
}
]
},
"FileCacheOptions": {
"type": "object",
"additionalProperties": false,
"properties": {
"cacheDirectory": {
"description": "Base directory for the cache (defaults to node_modules/.cache/webpack).",
"type": "string",
"absolutePath": true
},
"hashAlgorithm": {
"description": "Algorithm used for generation the hash (see node.js crypto package)",
"type": "string"
},
"log": {
"description": "Display log info when cache in accessed.",
"type": "boolean"
},
"name": {
"description": "Name for the cache. Different names will lead to different coexisting caches.",
"type": "string"
},
"store": {
"description": "When to store data to the filesystem. (idle: Store data when compiler is idle; background: Store data in background while compiling, but doesn't block the compilation; instant: Store data when creating blocking compilation until data is stored; defaults to idle)",
"enum": ["idle", "background", "instant"]
},
"type": {
"description": "Filesystem caching",
"enum": ["filesystem"]
},
"version": {
"description": "Version of the cache data. Different versions won't allow to reuse the cache and override existing content. Update the version when config changed in a way which doesn't allow to reuse cache. This will invalidate the cache.",
"type": "string"
},
"warn": {
"description": "Display warnings when (de)serialization of data failed.",
"type": "boolean"
}
},
"required": ["type"]
},
"FilterItemTypes": {
"anyOf": [
{
@ -213,6 +253,17 @@
}
}
},
"MemoryCacheOptions": {
"type": "object",
"additionalProperties": false,
"properties": {
"type": {
"description": "In memory caching",
"enum": ["memory"]
}
},
"required": ["type"]
},
"ModuleOptions": {
"type": "object",
"additionalProperties": false,
@ -1897,12 +1948,28 @@
"description": "Cache generated modules and chunks to improve performance for multiple incremental builds.",
"anyOf": [
{
"description": "You can pass `false` to disable it.",
"type": "boolean"
"description": "Disable caching.",
"enum": [false]
},
{
"description": "You can pass an object to enable it and let webpack use the passed object as cache. This way you can share the cache object between multiple compiler calls.",
"type": "object"
"description": "Enable in memory caching.",
"enum": [true]
},
{
"description": "Options for memory caching.",
"anyOf": [
{
"$ref": "#/definitions/MemoryCacheOptions"
}
]
},
{
"description": "Options for persistent caching.",
"anyOf": [
{
"$ref": "#/definitions/FileCacheOptions"
}
]
}
]
},
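
For reference, the explicit in-memory form of the option validates against MemoryCacheOptions; `cache: true` is the shorthand for it and `cache: false` disables caching. A minimal hypothetical config (entry and mode are arbitrary):

``` javascript
// Minimal hypothetical config using the new in-memory cache form.
module.exports = {
	mode: "development",
	entry: "./example.js",
	cache: {
		type: "memory"
	}
};
```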

View File

@ -505,28 +505,28 @@ chunk {0} bundle.js (main) 73 bytes >{1}< >{2}< [entry] [rendered]
> ./index main
[0] ./index.js 51 bytes {0} [built]
entry ./index main
Xms (resolving: Xms, integration: Xms, building: Xms)
Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
[1] ./a.js 22 bytes {0} [built]
cjs require ./a [0] ./index.js 1:0-14
[0] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {1} 1.bundle.js 22 bytes <{0}> [rendered]
> ./b [0] ./index.js 2:0-16
[2] ./b.js 22 bytes {1} [built]
amd require ./b [0] ./index.js 2:0-16
[0] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {2} 2.bundle.js 54 bytes <{0}> >{3}< [rendered]
> ./c [0] ./index.js 3:0-16
[3] ./c.js 54 bytes {2} [built]
amd require ./c [0] ./index.js 3:0-16
[0] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {3} 3.bundle.js 44 bytes <{2}> [rendered]
> [3] ./c.js 1:0-52
[4] ./d.js 22 bytes {3} [built]
require.ensure item ./d [3] ./c.js 1:0-52
[0] Xms -> [3] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> [3] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
[5] ./e.js 22 bytes {3} [built]
require.ensure item ./e [3] ./c.js 1:0-52
[0] Xms -> [3] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)"
[0] Xms -> [3] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)"
`;
exports[`StatsTestCases should print correct stats for chunks-development 1`] = `
@ -543,29 +543,29 @@ chunk {0} 0.bundle.js 60 bytes <{c}> [rendered]
> [./c.js] ./c.js 1:0-52
[./d.js] 22 bytes {0} [built]
require.ensure item ./d [./c.js] 1:0-52
[./index.js] Xms -> [./c.js] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[./index.js] Xms -> [./c.js] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
[./e.js] 38 bytes {0} [built]
require.ensure item ./e [./c.js] 1:0-52
[./index.js] Xms -> [./c.js] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[./index.js] Xms -> [./c.js] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {b} b.bundle.js 22 bytes <{main}> [rendered]
> ./b [./index.js] ./index.js 2:0-16
[./b.js] 22 bytes {b} [built]
amd require ./b [./index.js] 2:0-16
[./index.js] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[./index.js] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {c} c.bundle.js 54 bytes <{main}> >{0}< [rendered]
> ./c [./index.js] ./index.js 3:0-16
[./c.js] 54 bytes {c} [built]
amd require ./c [./index.js] 3:0-16
[./index.js] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[./index.js] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {main} bundle.js (main) 73 bytes >{b}< >{c}< [entry] [rendered]
> ./index main
[./a.js] 22 bytes {main} [built]
cjs require ./a [./e.js] 1:0-14
cjs require ./a [./index.js] 1:0-14
[./index.js] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[./index.js] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
[./index.js] 51 bytes {main} [built]
entry ./index main
Xms (resolving: Xms, integration: Xms, building: Xms)"
Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)"
`;
exports[`StatsTestCases should print correct stats for circular-correctness 1`] = `
@ -2051,33 +2051,33 @@ chunk {0} main.js (main) 73 bytes >{1}< >{2}< [entry] [rendered]
[0] ./index.js 51 bytes {0} [depth 0] [built]
ModuleConcatenation bailout: Module is not an ECMAScript module
entry ./index main
Xms (resolving: Xms, integration: Xms, building: Xms)
Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
[1] ./a.js 22 bytes {0} [depth 1] [built]
ModuleConcatenation bailout: Module is not an ECMAScript module
cjs require ./a [0] ./index.js 1:0-14
[0] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {1} 1.js 22 bytes <{0}> [rendered]
> ./b [0] ./index.js 2:0-16
[2] ./b.js 22 bytes {1} [depth 1] [built]
ModuleConcatenation bailout: Module is not an ECMAScript module
amd require ./b [0] ./index.js 2:0-16
[0] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {2} 2.js 54 bytes <{0}> >{3}< [rendered]
> ./c [0] ./index.js 3:0-16
[3] ./c.js 54 bytes {2} [depth 1] [built]
ModuleConcatenation bailout: Module is not an ECMAScript module
amd require ./c [0] ./index.js 3:0-16
[0] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
chunk {3} 3.js 44 bytes <{2}> [rendered]
> [3] ./c.js 1:0-52
[4] ./d.js 22 bytes {3} [depth 2] [built]
ModuleConcatenation bailout: Module is not an ECMAScript module
require.ensure item ./d [3] ./c.js 1:0-52
[0] Xms -> [3] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)
[0] Xms -> [3] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)
[5] ./e.js 22 bytes {3} [depth 2] [built]
ModuleConcatenation bailout: Module is not an ECMAScript module
require.ensure item ./e [3] ./c.js 1:0-52
[0] Xms -> [3] Xms -> Xms (resolving: Xms, integration: Xms, building: Xms)"
[0] Xms -> [3] Xms -> Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)"
`;
exports[`StatsTestCases should print correct stats for resolve-plugin-context 1`] = `
@ -2342,7 +2342,7 @@ bundle.js 3.57 KiB 0 [emitted] main
Entrypoint main = bundle.js
[0] ./index.js 0 bytes {0} [built]
entry ./index main
Xms (resolving: Xms, integration: Xms, building: Xms)"
Xms (resolving: Xms, restoring: Xms, integration: Xms, building: Xms, storing: Xms)"
`;
exports[`StatsTestCases should print correct stats for split-chunks 1`] = `

View File

@ -0,0 +1,73 @@
const path = require("path");
const BinaryMiddleware = require("../lib/serialization/BinaryMiddleware");
const FileMiddleware = require("../lib/serialization/FileMiddleware");
const Serializer = require("../lib/serialization/Serializer");
const serializer = new Serializer([
new BinaryMiddleware(),
new FileMiddleware()
]);
const ESCAPE = null;
const ESCAPE_ESCAPE_VALUE = 1;
const ESCAPE_END_OBJECT = 2;
const ESCAPE_UNDEFINED = 3;
const printData = async (data, indent) => {
if (!Array.isArray(data)) throw new Error("Not an array");
if (Buffer.isBuffer(data[0])) {
for (const b of data) {
if (typeof b === "function") {
const innerData = await b();
console.log(`${indent}= lazy {`);
await printData(innerData, indent + " ");
console.log(`${indent}}`);
} else {
console.log(`${indent}= ${b.toString("hex")}`);
}
}
return;
}
let i = 0;
const read = () => {
return data[i++];
};
console.log(`${indent}Version: ${read()}`);
while (i < data.length) {
const item = read();
if (item === ESCAPE) {
const nextItem = read();
if (nextItem === ESCAPE_ESCAPE_VALUE) {
console.log(`${indent}- null`);
} else if (nextItem === ESCAPE_UNDEFINED) {
console.log(`${indent}- undefined`);
} else if (nextItem === ESCAPE_END_OBJECT) {
indent = indent.slice(0, indent.length - 2);
console.log(`${indent}}`);
} else if (typeof nextItem === "number") {
console.log(`${indent}- Reference ${nextItem}`);
} else {
const name = read();
console.log(`${indent}- Object (${name}) {`);
indent += " ";
}
} else if (typeof item === "string") {
console.log(`${indent}- string ${JSON.stringify(item)}`);
} else if (Buffer.isBuffer(item)) {
console.log(`${indent}- buffer ${item.toString("hex")}`);
} else if (typeof item === "function") {
const innerData = await item();
console.log(`${indent}- lazy {`);
await printData(innerData, indent + " ");
console.log(`${indent}}`);
}
}
};
const filename = process.argv[2];
console.log(`Printing content of ${filename}`);
serializer
.deserializeFromFile(path.resolve(filename))
.then(data => printData(data, ""));