fix(types): more

This commit is contained in:
alexander.akait 2024-03-17 20:15:44 +03:00
parent f7c8e2c9a9
commit 3e3dfd781b
40 changed files with 379 additions and 113 deletions

22
declarations.d.ts vendored
View File

@ -124,14 +124,15 @@ declare module "neo-async" {
// There are no typings for @webassemblyjs/ast
declare module "@webassemblyjs/ast" {
export interface Visitor {
ModuleImport?: (p: NodePath<ModuleImport>) => void;
ModuleExport?: (p: NodePath<ModuleExport>) => void;
Start?: (p: NodePath<Start>) => void;
Global?: (p: NodePath<Global>) => void;
}
export function traverse(
ast: any,
visitor: {
ModuleImport?: (p: NodePath<ModuleImport>) => void;
ModuleExport?: (p: NodePath<ModuleExport>) => void;
Start?: (p: NodePath<Start>) => void;
Global?: (p: NodePath<Global>) => void;
}
visitor: Visitor
): void;
export class NodePath<T> {
node: T;
@ -246,6 +247,15 @@ declare module "@webassemblyjs/ast" {
export function isFuncImportDescr(n: Node): boolean;
}
// NOTE(review): minimal ambient declaration — @webassemblyjs/wasm-parser ships no typings.
declare module "@webassemblyjs/wasm-parser" {
// Decodes a wasm binary into an AST object; the boolean options toggle which
// sections are parsed/dumped. Return is untyped ("any") because the AST shape
// is not modeled here — see the @webassemblyjs/ast declaration in this file.
export function decode(source: string | Buffer, options: { dump?: boolean, ignoreCodeSection?: boolean, ignoreDataSection?: boolean, ignoreCustomNameSection?: boolean }): any;
}
// NOTE(review): minimal ambient declaration — @webassemblyjs/wasm-edit ships no typings.
declare module "@webassemblyjs/wasm-edit" {
// Presumably inserts `newNodes` into the binary `bin`, using `ast` for layout —
// returns the rebuilt binary. TODO confirm against the package's API docs.
export function addWithAST(ast: any, bin: any, newNodes: import("@webassemblyjs/ast").Node[]): ArrayBuffer;
// Presumably applies visitor-driven edits to `bin` and returns the rebuilt
// binary; `visitors` uses the Visitor shape declared in @webassemblyjs/ast above.
export function editWithAST(ast: any, bin: any, visitors: import("@webassemblyjs/ast").Visitor): ArrayBuffer;
}
declare module "webpack-sources" {
export type MapOptions = { columns?: boolean; module?: boolean };

View File

@ -12,7 +12,7 @@ export type BannerPluginArgument =
* The banner as function, it will be wrapped in a comment.
*/
export type BannerFunction = (data: {
hash: string;
hash?: string;
chunk: import("../../lib/Chunk");
filename: string;
}) => string;

View File

@ -27,7 +27,7 @@ class AutomaticPrefetchPlugin {
);
}
);
/** @type {{context: string, request: string}[] | null} */
/** @type {{context: string | null, request: string}[] | null} */
let lastModules = null;
compiler.hooks.afterCompile.tap("AutomaticPrefetchPlugin", compilation => {
lastModules = [];

View File

@ -196,7 +196,7 @@ class CacheFacade {
/**
* @param {Cache} cache the root cache
* @param {string} name the child cache name
* @param {string | HashConstructor} hashFunction the hash function to use
* @param {(string | HashConstructor)=} hashFunction the hash function to use
*/
constructor(cache, name, hashFunction) {
this._cache = cache;

View File

@ -102,6 +102,8 @@ const { isSourceEqual } = require("./util/source");
/** @typedef {import("./Dependency").ReferencedExport} ReferencedExport */
/** @typedef {import("./DependencyTemplate")} DependencyTemplate */
/** @typedef {import("./Entrypoint").EntryOptions} EntryOptions */
/** @typedef {import("./Module").BuildInfo} BuildInfo */
/** @typedef {import("./NormalModule").NormalModuleCompilationHooks} NormalModuleCompilationHooks */
/** @typedef {import("./Module").CodeGenerationResult} CodeGenerationResult */
/** @typedef {import("./ModuleFactory")} ModuleFactory */
/** @typedef {import("./ModuleGraphConnection")} ModuleGraphConnection */
@ -363,6 +365,10 @@ const esmDependencyCategory = "esm";
// TODO webpack 6: remove
const deprecatedNormalModuleLoaderHook = util.deprecate(
/**
* @param {Compilation} compilation compilation
* @returns {NormalModuleCompilationHooks["loader"]} hooks
*/
compilation => {
return require("./NormalModule").getCompilationHooks(compilation).loader;
},
@ -437,6 +443,10 @@ class Compilation {
const processAssetsHook = new AsyncSeriesHook(["assets"]);
let savedAssets = new Set();
/**
* @param {CompilationAssets} assets assets
* @returns {CompilationAssets} new assets
*/
const popNewAssets = assets => {
let newAssets = undefined;
for (const file of Object.keys(assets)) {
@ -1068,6 +1078,10 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
// TODO webpack 6 remove
this.compilationDependencies = {
add: util.deprecate(
/**
* @param {string} item item
* @returns {LazySet<string>} file dependencies
*/
item => this.fileDependencies.add(item),
"Compilation.compilationDependencies is deprecated (used Compilation.fileDependencies instead)",
"DEP_WEBPACK_COMPILATION_COMPILATION_DEPENDENCIES"
@ -1121,12 +1135,20 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
}
}
/**
* @param {NormalizedStatsOptions} options options
* @returns {StatsFactory} the stats factory
*/
createStatsFactory(options) {
const statsFactory = new StatsFactory();
this.hooks.statsFactory.call(statsFactory, options);
return statsFactory;
}
/**
* @param {NormalizedStatsOptions} options options
* @returns {StatsPrinter} the stats printer
*/
createStatsPrinter(options) {
const statsPrinter = new StatsPrinter();
this.hooks.statsPrinter.call(statsPrinter, options);
@ -1166,7 +1188,9 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
case LogType.warn:
case LogType.error:
case LogType.trace:
trace = ErrorHelpers.cutOffLoaderExecution(new Error("Trace").stack)
trace = ErrorHelpers.cutOffLoaderExecution(
/** @type {string} */ (new Error("Trace").stack)
)
.split("\n")
.slice(3);
break;
@ -1609,7 +1633,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
} catch (err) {
if (inProgressSorting <= 0) return;
inProgressSorting = -1;
onDependenciesSorted(err);
onDependenciesSorted(/** @type {WebpackError} */ (err));
return;
}
if (--inProgressSorting === 0) onDependenciesSorted();
@ -1699,7 +1723,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
/** @type {DependenciesBlock[]} */
const queue = [module];
do {
const block = queue.pop();
const block = /** @type {DependenciesBlock} */ (queue.pop());
if (block.dependencies) {
currentBlock = block;
let i = 0;
@ -1716,6 +1740,13 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
if (--inProgressSorting === 0) onDependenciesSorted();
}
/**
* @private
* @param {Module} originModule original module
* @param {Dependency} dependency dependency
* @param {Module} module cached module
* @param {Callback} callback callback
*/
_handleNewModuleFromUnsafeCache(originModule, dependency, module, callback) {
const moduleGraph = this.moduleGraph;
@ -1740,6 +1771,12 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
);
}
/**
* @private
* @param {Module} originModule original modules
* @param {Dependency} dependency dependency
* @param {Module} module cached module
*/
_handleExistingModuleFromUnsafeCache(originModule, dependency, module) {
const moduleGraph = this.moduleGraph;
@ -1826,27 +1863,30 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
moduleGraph.setProfile(newModule, currentProfile);
}
this.addModule(newModule, (err, module) => {
this.addModule(newModule, (err, _module) => {
if (err) {
applyFactoryResultDependencies();
if (!err.module) {
err.module = module;
err.module = _module;
}
this.errors.push(err);
return callback(err);
}
const module =
/** @type {Module & { restoreFromUnsafeCache?: Function }} */
(_module);
if (
this._unsafeCache &&
factoryResult.cacheable !== false &&
/** @type {any} */ (module).restoreFromUnsafeCache &&
module.restoreFromUnsafeCache &&
this._unsafeCachePredicate(module)
) {
const unsafeCacheableModule =
/** @type {Module & { restoreFromUnsafeCache: Function }} */ (
module
);
/** @type {Module & { restoreFromUnsafeCache: Function }} */
(module);
for (let i = 0; i < dependencies.length; i++) {
const dependency = dependencies[i];
moduleGraph.setResolvedModule(
@ -1901,6 +1941,15 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
);
}
/**
* @private
* @param {Module} originModule original module
* @param {Module} module module
* @param {boolean} recursive true if make it recursive, otherwise false
* @param {boolean} checkCycle true if need to check cycle, otherwise false
* @param {ModuleCallback} callback callback
* @returns {void}
*/
_handleModuleBuildAndDependencies(
originModule,
module,
@ -2177,7 +2226,10 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
}
};
entryData[target].push(entry);
this.entries.set(name, entryData);
this.entries.set(
/** @type {NonNullable<EntryOptions["name"]>} */ (name),
entryData
);
} else {
entryData[target].push(entry);
for (const key of Object.keys(options)) {
@ -2578,6 +2630,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
const ParallelismFactorCalculator = require("./util/ParallelismFactorCalculator");
const p = new ParallelismFactorCalculator();
const moduleGraph = this.moduleGraph;
/** @type {Map<Module, ModuleProfile>} */
const modulesWithProfiles = new Map();
for (const module of this.modules) {
const profile = moduleGraph.getProfile(module);
@ -2638,6 +2691,11 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
logger.debug(msg);
}
};
/**
* @param {string} category a category
* @param {(profile: ModuleProfile) => number} getDuration get duration callback
* @param {(profile: ModuleProfile) => number} getParallelism get parallelism callback
*/
const logNormalSummary = (category, getDuration, getParallelism) => {
let sum = 0;
let max = 0;
@ -2662,6 +2720,11 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
`${Math.round(sum)} ms ${category}`
);
};
/**
* @param {string} category a category
* @param {(profile: ModuleProfile) => number} getDuration get duration callback
* @param {(profile: ModuleProfile) => number} getParallelism get parallelism callback
*/
const logByLoadersSummary = (category, getDuration, getParallelism) => {
const map = new Map();
for (const [module, profile] of modulesWithProfiles) {
@ -3960,6 +4023,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
let statModulesFromCache = 0;
const { chunkGraph, runtimeTemplate, moduleMemCaches2 } = this;
const { hashFunction, hashDigest, hashDigestLength } = this.outputOptions;
/** @type {WebpackError[]} */
const errors = [];
for (const module of this.modules) {
const memCache = moduleMemCaches2 && moduleMemCaches2.get(module);
@ -4008,6 +4072,18 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
);
}
/**
* @private
* @param {Module} module module
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {RuntimeSpec} runtime runtime
* @param {OutputOptions["hashFunction"]} hashFunction hash function
* @param {RuntimeTemplate} runtimeTemplate runtime template
* @param {OutputOptions["hashDigest"]} hashDigest hash digest
* @param {OutputOptions["hashDigestLength"]} hashDigestLength hash digest length
* @param {WebpackError[]} errors errors
* @returns {string} module hash digest
*/
_createModuleHash(
module,
chunkGraph,
@ -4028,7 +4104,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
});
moduleHashDigest = /** @type {string} */ (moduleHash.digest(hashDigest));
} catch (err) {
errors.push(new ModuleHashingError(module, err));
errors.push(new ModuleHashingError(module, /** @type {Error} */ (err)));
moduleHashDigest = "XXXXXX";
}
chunkGraph.setModuleHashes(
@ -4042,7 +4118,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
createHash() {
this.logger.time("hashing: initialize hash");
const chunkGraph = this.chunkGraph;
const chunkGraph = /** @type {ChunkGraph} */ (this.chunkGraph);
const runtimeTemplate = this.runtimeTemplate;
const outputOptions = this.outputOptions;
const hashFunction = outputOptions.hashFunction;
@ -4137,7 +4213,9 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
for (const chunk of runtimeChunks) {
const hasFullHashModules =
chunkGraph.getNumberOfChunkFullHashModules(chunk) !== 0;
const info = runtimeChunksMap.get(chunk);
const info =
/** @type {RuntimeChunkInfo} */
(runtimeChunksMap.get(chunk));
for (const otherInfo of info.referencedBy) {
if (hasFullHashModules) {
chunkGraph.upgradeDependentToFullHashModules(otherInfo.chunk);
@ -4181,8 +4259,12 @@ This prevents using hashes of each other and should be avoided.`);
const codeGenerationJobs = [];
/** @type {Map<string, Map<Module, {module: Module, hash: string, runtime: RuntimeSpec, runtimes: RuntimeSpec[]}>>} */
const codeGenerationJobsMap = new Map();
/** @type {WebpackError[]} */
const errors = [];
/**
* @param {Chunk} chunk chunk
*/
const processChunk = chunk => {
// Last minute module hash generation for modules that depend on chunk hashes
this.logger.time("hashing: hash runtime modules");
@ -4271,7 +4353,9 @@ This prevents using hashes of each other and should be avoided.`);
this.logger.time("hashing: process full hash modules");
for (const chunk of fullHashChunks) {
for (const module of chunkGraph.getChunkFullHashModulesIterable(chunk)) {
for (const module of /** @type {Iterable<RuntimeModule>} */ (
chunkGraph.getChunkFullHashModulesIterable(chunk)
)) {
const moduleHash = createHash(hashFunction);
module.updateHash(moduleHash, {
chunkGraph,
@ -4345,6 +4429,9 @@ This prevents using hashes of each other and should be avoided.`);
const newRelated = newInfo && newInfo.related;
if (oldRelated) {
for (const key of Object.keys(oldRelated)) {
/**
* @param {string} name name
*/
const remove = name => {
const relatedIn = this._assetsRelatedIn.get(name);
if (relatedIn === undefined) return;
@ -4365,6 +4452,9 @@ This prevents using hashes of each other and should be avoided.`);
}
if (newRelated) {
for (const key of Object.keys(newRelated)) {
/**
* @param {string} name name
*/
const add = name => {
let relatedIn = this._assetsRelatedIn.get(name);
if (relatedIn === undefined) {
@ -4389,7 +4479,7 @@ This prevents using hashes of each other and should be avoided.`);
/**
* @param {string} file file name
* @param {Source | function(Source): Source} newSourceOrFunction new asset source or function converting old to new
* @param {AssetInfo | function(AssetInfo | undefined): AssetInfo} assetInfoUpdateOrFunction new asset info or function converting old to new
* @param {(AssetInfo | function(AssetInfo | undefined): AssetInfo) | undefined} assetInfoUpdateOrFunction new asset info or function converting old to new
*/
updateAsset(
file,
@ -4420,6 +4510,10 @@ This prevents using hashes of each other and should be avoided.`);
}
}
/**
* @param {string} file file name
* @param {string} newFile the new name of file
*/
renameAsset(file, newFile) {
const source = this.assets[file];
if (!source) {
@ -4558,9 +4652,10 @@ This prevents using hashes of each other and should be avoided.`);
createModuleAssets() {
const { chunkGraph } = this;
for (const module of this.modules) {
if (module.buildInfo.assets) {
const assetsInfo = module.buildInfo.assetsInfo;
for (const assetName of Object.keys(module.buildInfo.assets)) {
const buildInfo = /** @type {BuildInfo} */ (module.buildInfo);
if (buildInfo.assets) {
const assetsInfo = buildInfo.assetsInfo;
for (const assetName of Object.keys(buildInfo.assets)) {
const fileName = this.getPath(assetName, {
chunkGraph: this.chunkGraph,
module
@ -4570,7 +4665,7 @@ This prevents using hashes of each other and should be avoided.`);
}
this.emitAsset(
fileName,
module.buildInfo.assets[assetName],
buildInfo.assets[assetName],
assetsInfo ? assetsInfo.get(assetName) : undefined
);
this.hooks.moduleAsset.call(module, fileName);
@ -4617,7 +4712,9 @@ This prevents using hashes of each other and should be avoided.`);
runtimeTemplate: this.runtimeTemplate
});
} catch (err) {
this.errors.push(new ChunkRenderError(chunk, "", err));
this.errors.push(
new ChunkRenderError(chunk, "", /** @type {Error} */ (err))
);
return callback();
}
asyncLib.forEach(
@ -4871,7 +4968,7 @@ This prevents using hashes of each other and should be avoided.`);
});
},
err => {
if (err) return callback(err);
if (err) return callback(/** @type {WebpackError} */ (err));
// Create new chunk graph, chunk and entrypoint for the build time execution
const chunkGraph = new ChunkGraph(
@ -5054,7 +5151,10 @@ This prevents using hashes of each other and should be avoided.`);
missingDependencies,
buildDependencies
);
if (module.buildInfo.cacheable === false) {
if (
/** @type {BuildInfo} */ (module.buildInfo).cacheable ===
false
) {
cacheable = false;
}
if (module.buildInfo && module.buildInfo.assets) {
@ -5288,6 +5388,9 @@ Object.defineProperty(compilationPrototype, "cache", {
"DEP_WEBPACK_COMPILATION_CACHE"
),
set: util.deprecate(
/**
* @param {any} v value
*/
v => {},
"Compilation.cache was removed in favor of Compilation.getCache()",
"DEP_WEBPACK_COMPILATION_CACHE"

View File

@ -44,6 +44,7 @@ const { isSourceEqual } = require("./util/source");
/** @typedef {import("./Dependency")} Dependency */
/** @typedef {import("./FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./Module").BuildInfo} BuildInfo */
/** @typedef {import("./logging/createConsoleLogger").LoggingFunction} LoggingFunction */
/** @typedef {import("./util/WeakTupleMap")} WeakTupleMap */
/** @typedef {import("./util/fs").IStats} IStats */
@ -272,7 +273,7 @@ class Compiler {
this.cache = new Cache();
/** @type {Map<Module, { buildInfo: object, references: References | undefined, memCache: WeakTupleMap }> | undefined} */
/** @type {Map<Module, { buildInfo: BuildInfo, references: References | undefined, memCache: WeakTupleMap }> | undefined} */
this.moduleMemCaches = undefined;
this.compilerPath = "";
@ -664,7 +665,7 @@ class Compiler {
const assets = compilation.getAssets();
compilation.assets = { ...compilation.assets };
/** @type {Map<string, { path: string, source: Source, size: number, waiting: { cacheEntry: any, file: string }[] }>} */
/** @type {Map<string, SimilarEntry>} */
const caseInsensitiveMap = new Map();
/** @type {Set<string>} */
const allTargetPaths = new Set();
@ -747,12 +748,12 @@ ${other}`);
} else {
caseInsensitiveMap.set(
caseInsensitiveTargetPath,
(similarEntry = {
(similarEntry = /** @type {SimilarEntry} */ ({
path: targetPath,
source,
size: undefined,
waiting: undefined
})
}))
);
return false;
}

View File

@ -12,7 +12,7 @@ const WebpackError = require("./WebpackError");
/**
* @template T
* @callback Callback
* @param {Error=} err
* @param {Error | null} err
* @param {T=} stats
* @returns {void}
*/

View File

@ -48,6 +48,7 @@ const {
/** @typedef {import("./Module")} Module */
/** @typedef {import("./RuntimeModule")} RuntimeModule */
/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */
/** @typedef {import("./javascript/JavascriptParser")} JavascriptParser */
/**
* @typedef {Object} HMRJavascriptParserHooks

View File

@ -40,7 +40,7 @@ const sortFragmentWithIndex = ([a, i], [b, j]) => {
*/
class InitFragment {
/**
* @param {string | Source} content the source code that will be included as initialization code
* @param {string | Source | undefined} content the source code that will be included as initialization code
* @param {number} stage category of initialization code (contribute to order)
* @param {number} position position in the category (contribute to order)
* @param {string=} key unique key to avoid emitting the same initialization code twice
@ -56,7 +56,7 @@ class InitFragment {
/**
* @param {GenerateContext} context context
* @returns {string | Source} the source code that will be included as initialization code
* @returns {string | Source | undefined} the source code that will be included as initialization code
*/
getContent(context) {
return this.content;

View File

@ -64,7 +64,7 @@ const makeSerializable = require("./util/makeSerializable");
* @property {RuntimeSpec} runtime the runtime code should be generated for
* @property {RuntimeSpec[]} [runtimes] the runtimes code should be generated for
* @property {ConcatenationScope=} concatenationScope when in concatenated module, information about other concatenated modules
* @property {CodeGenerationResults} codeGenerationResults code generation results of other modules (need to have a codeGenerationDependency to use that)
* @property {CodeGenerationResults | undefined} codeGenerationResults code generation results of other modules (need to have a codeGenerationDependency to use that)
* @property {Compilation=} compilation the compilation
* @property {ReadonlySet<string>=} sourceTypes source types
*/
@ -270,6 +270,9 @@ class Module extends DependenciesBlock {
).setProfile(this, value);
}
/**
* @returns {number | null} the pre order index
*/
get index() {
return ModuleGraph.getModuleGraphForModule(
this,
@ -278,6 +281,9 @@ class Module extends DependenciesBlock {
).getPreOrderIndex(this);
}
/**
* @param {number} value the pre order index
*/
set index(value) {
ModuleGraph.getModuleGraphForModule(
this,
@ -286,6 +292,9 @@ class Module extends DependenciesBlock {
).setPreOrderIndex(this, value);
}
/**
* @returns {number | null} the post order index
*/
get index2() {
return ModuleGraph.getModuleGraphForModule(
this,
@ -294,6 +303,9 @@ class Module extends DependenciesBlock {
).getPostOrderIndex(this);
}
/**
* @param {number} value the post order index
*/
set index2(value) {
ModuleGraph.getModuleGraphForModule(
this,
@ -302,6 +314,9 @@ class Module extends DependenciesBlock {
).setPostOrderIndex(this, value);
}
/**
* @returns {number | null} the depth
*/
get depth() {
return ModuleGraph.getModuleGraphForModule(
this,
@ -310,6 +325,9 @@ class Module extends DependenciesBlock {
).getDepth(this);
}
/**
* @param {number} value the depth
*/
set depth(value) {
ModuleGraph.getModuleGraphForModule(
this,
@ -318,6 +336,9 @@ class Module extends DependenciesBlock {
).setDepth(this, value);
}
/**
* @returns {Module | null | undefined} issuer
*/
get issuer() {
return ModuleGraph.getModuleGraphForModule(
this,
@ -326,6 +347,9 @@ class Module extends DependenciesBlock {
).getIssuer(this);
}
/**
* @param {Module | null} value issuer
*/
set issuer(value) {
ModuleGraph.getModuleGraphForModule(
this,
@ -872,6 +896,7 @@ class Module extends DependenciesBlock {
codeGenerationResults: undefined
};
const sources = this.codeGeneration(codeGenContext).sources;
return type ? sources.get(type) : sources.get(first(this.getSourceTypes()));
}

View File

@ -205,7 +205,7 @@ class ModuleGraph {
}
/**
* @param {Module} originModule the referencing module
* @param {Module | null} originModule the referencing module
* @param {Dependency} dependency the referencing dependency
* @param {Module} module the referenced module
* @returns {void}
@ -818,7 +818,7 @@ class ModuleGraph {
const fn = args.pop();
if (this._moduleMemCaches && this._cacheStage) {
const memCache = this._moduleMemCaches.get(
this.getParentModule(dependency)
/** @type {Module} */ (this.getParentModule(dependency))
);
if (memCache !== undefined) {
return memCache.provide(dependency, this._cacheStage, ...args, () =>

View File

@ -235,7 +235,7 @@ class NodeStuffPlugin {
relative(
/** @type {InputFileSystem} */ (compiler.inputFileSystem),
context,
module.context
/** @type {string} */ (module.context)
)
);
break;
@ -245,7 +245,9 @@ class NodeStuffPlugin {
.for("__dirname")
.tap(PLUGIN_NAME, expr => {
if (!parser.state.module) return;
return evaluateToString(parser.state.module.context)(expr);
return evaluateToString(
/** @type {string} */ (parser.state.module.context)
)(expr);
});
}
parser.hooks.expression

View File

@ -926,6 +926,8 @@ class NormalModule extends Module {
loaderContext._compilation =
loaderContext._compiler =
loaderContext._module =
// eslint-disable-next-line no-warning-comments
// @ts-ignore
loaderContext.fs =
undefined;
@ -1352,7 +1354,7 @@ class NormalModule extends Module {
? new RawSource(
"throw new Error(" + JSON.stringify(this.error.message) + ");"
)
: this.generator.generate(this, {
: /** @type {Generator} */ (this.generator).generate(this, {
dependencyTemplates,
runtimeTemplate,
moduleGraph,

View File

@ -268,7 +268,7 @@ class NormalModuleFactory extends ModuleFactory {
resolveInScheme: new HookMap(
() => new AsyncSeriesBailHook(["resourceData", "resolveData"])
),
/** @type {AsyncSeriesBailHook<[ResolveData], Module>} */
/** @type {AsyncSeriesBailHook<[ResolveData], Module | undefined>} */
factorize: new AsyncSeriesBailHook(["resolveData"]),
/** @type {AsyncSeriesBailHook<[ResolveData], false | void>} */
beforeResolve: new AsyncSeriesBailHook(["resolveData"]),

View File

@ -59,7 +59,9 @@ class ProvidePlugin {
*/
const handler = (parser, parserOptions) => {
Object.keys(definitions).forEach(name => {
const request = [].concat(definitions[name]);
const request =
/** @type {string[]} */
([]).concat(definitions[name]);
const splittedName = name.split(".");
if (splittedName.length > 0) {
splittedName.slice(1).forEach((_, i) => {

View File

@ -7,6 +7,7 @@
/** @typedef {import("../declarations/WebpackOptions").StatsOptions} StatsOptions */
/** @typedef {import("./Compilation")} Compilation */
/** @typedef {import("./Compilation").NormalizedStatsOptions} NormalizedStatsOptions */
/** @typedef {import("./stats/DefaultStatsFactoryPlugin").StatsCompilation} StatsCompilation */
class Stats {
@ -58,7 +59,9 @@ class Stats {
forToString: false
});
const statsFactory = this.compilation.createStatsFactory(options);
const statsFactory = this.compilation.createStatsFactory(
/** @type {NormalizedStatsOptions} */ (options)
);
return statsFactory.create("compilation", this.compilation, {
compilation: this.compilation
@ -74,8 +77,12 @@ class Stats {
forToString: true
});
const statsFactory = this.compilation.createStatsFactory(options);
const statsPrinter = this.compilation.createStatsPrinter(options);
const statsFactory = this.compilation.createStatsFactory(
/** @type {NormalizedStatsOptions} */ (options)
);
const statsPrinter = this.compilation.createStatsPrinter(
/** @type {NormalizedStatsOptions} */ (options)
);
const data = statsFactory.create("compilation", this.compilation, {
compilation: this.compilation

View File

@ -71,7 +71,9 @@ class RawDataUrlModule extends Module {
* @returns {string} a user readable identifier of the module
*/
readableIdentifier(requestShortener) {
return requestShortener.shorten(this.readableIdentifierStr);
return /** @type {string} */ (
requestShortener.shorten(this.readableIdentifierStr)
);
}
/**

View File

@ -13,7 +13,7 @@ const Template = require("../Template");
/** @typedef {import("../Generator").GenerateContext} GenerateContext */
/**
* @typedef {GenerateContext} Context
* @extends {InitFragment<GenerateContext>}
*/
class AwaitDependenciesInitFragment extends InitFragment {
/**

View File

@ -9,12 +9,13 @@ const Cache = require("../Cache");
const ProgressPlugin = require("../ProgressPlugin");
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("./PackFileCacheStrategy")} PackFileCacheStrategy */
const BUILD_DEPENDENCIES_KEY = Symbol();
class IdleFileCachePlugin {
/**
* @param {TODO} strategy cache strategy
* @param {PackFileCacheStrategy} strategy cache strategy
* @param {number} idleTimeout timeout
* @param {number} idleTimeoutForInitialStore initial timeout
* @param {number} idleTimeoutAfterLargeChanges timeout after changes
@ -93,7 +94,9 @@ class IdleFileCachePlugin {
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
dependencies => {
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () =>
strategy.storeBuildDependencies(dependencies)
Promise.resolve().then(() =>
strategy.storeBuildDependencies(dependencies)
)
);
}
);
@ -227,7 +230,9 @@ class IdleFileCachePlugin {
compiler.hooks.done.tap("IdleFileCachePlugin", stats => {
// 10% build overhead is ignored, as it's not cacheable
timeSpendInBuild *= 0.9;
timeSpendInBuild += stats.endTime - stats.startTime;
timeSpendInBuild +=
/** @type {number} */ (stats.endTime) -
/** @type {number} */ (stats.startTime);
});
}
}

View File

@ -19,7 +19,7 @@ class MemoryCachePlugin {
* @returns {void}
*/
apply(compiler) {
/** @type {Map<string, { etag: Etag | null, data: any }>} */
/** @type {Map<string, { etag: Etag | null, data: any } | null>} */
const cache = new Map();
compiler.cache.hooks.store.tap(
{ name: "MemoryCachePlugin", stage: Cache.STAGE_MEMORY },

View File

@ -13,6 +13,10 @@ const Cache = require("../Cache");
/** @typedef {import("../Module")} Module */
class MemoryWithGcCachePlugin {
/**
* @param {Object} options Options
* @param {number} options.maxGenerations max generations
*/
constructor({ maxGenerations }) {
this._maxGenerations = maxGenerations;
}
@ -23,9 +27,9 @@ class MemoryWithGcCachePlugin {
*/
apply(compiler) {
const maxGenerations = this._maxGenerations;
/** @type {Map<string, { etag: Etag | null, data: any }>} */
/** @type {Map<string, { etag: Etag | null, data: any } | undefined | null>} */
const cache = new Map();
/** @type {Map<string, { entry: { etag: Etag | null, data: any }, until: number }>} */
/** @type {Map<string, { entry: { etag: Etag | null, data: any } | null, until: number }>} */
const oldCache = new Map();
let generation = 0;
let cachePosition = 0;

View File

@ -28,13 +28,16 @@ const {
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
/** @typedef {Map<string, string | false>} ResolveResults */
/** @typedef {Set<string>} Items */
/** @typedef {Set<string>} BuildDependencies */
/** @typedef {Map<string, PackItemInfo>} ItemInfo */
class PackContainer {
/**
* @param {Object} data stored data
* @param {string} version version identifier
* @param {Snapshot} buildSnapshot snapshot of all build dependencies
* @param {Set<string>} buildDependencies list of all unresolved build dependencies captured
* @param {BuildDependencies} buildDependencies list of all unresolved build dependencies captured
* @param {ResolveResults} resolveResults result of the resolved build dependencies
* @param {Snapshot} resolveBuildDependenciesSnapshot snapshot of the dependencies of the build dependencies resolving
*/
@ -54,13 +57,17 @@ class PackContainer {
this.resolveBuildDependenciesSnapshot = resolveBuildDependenciesSnapshot;
}
/**
* @param {ObjectSerializerContext} context context
*/
serialize({ write, writeLazy }) {
write(this.version);
write(this.buildSnapshot);
write(this.buildDependencies);
write(this.resolveResults);
write(this.resolveBuildDependenciesSnapshot);
writeLazy(this.data);
/** @type {NonNullable<ObjectSerializerContext["writeLazy"]>} */
(writeLazy)(this.data);
}
/**
@ -104,13 +111,17 @@ class PackItemInfo {
}
class Pack {
/**
* @param {Logger} logger a logger
* @param {number} maxAge max age of cache items
*/
constructor(logger, maxAge) {
/** @type {Map<string, PackItemInfo>} */
/** @type {ItemInfo} */
this.itemInfo = new Map();
/** @type {(string | undefined)[]} */
this.requests = [];
this.requestsTimeout = undefined;
/** @type {Map<string, PackItemInfo>} */
/** @type {ItemInfo} */
this.freshContent = new Map();
/** @type {(undefined | PackContent)[]} */
this.content = [];
@ -224,12 +235,18 @@ class Pack {
return i;
}
/**
* @private
* @param {Items} items items
* @param {Items} usedItems used items
* @param {number} newLoc new location
*/
_gcAndUpdateLocation(items, usedItems, newLoc) {
let count = 0;
let lastGC;
const now = Date.now();
for (const identifier of items) {
const info = this.itemInfo.get(identifier);
const info = /** @type {PackItemInfo} */ (this.itemInfo.get(identifier));
if (now - info.lastAccess > this.maxAge) {
this.itemInfo.delete(identifier);
items.delete(identifier);
@ -263,7 +280,7 @@ class Pack {
const loc = this._findLocation();
this.content[loc] = null; // reserve
const pack = {
/** @type {Set<string>} */
/** @type {Items} */
items: new Set(),
/** @type {Map<string, any>} */
map: new Map(),
@ -372,9 +389,9 @@ class Pack {
}
// 4. Determine merged items
/** @type {Set<string>} */
/** @type {Items} */
const mergedItems = new Set();
/** @type {Set<string>} */
/** @type {Items} */
const mergedUsedItems = new Set();
/** @type {(function(Map<string, any>): Promise<void>)[]} */
const addToMergedMap = [];
@ -544,6 +561,9 @@ class Pack {
}
}
/**
* @param {ObjectSerializerContext} context context
*/
serialize({ write, writeSeparate }) {
this._persistFreshContent();
this._optimizeSmallContent();
@ -571,6 +591,9 @@ class Pack {
write(null); // null as marker of the end of items
}
/**
* @param {ObjectDeserializerContext & { logger: Logger }} context context
*/
deserialize({ read, logger }) {
this.logger = logger;
{
@ -629,6 +652,9 @@ class PackContentItems {
this.map = map;
}
/**
* @param {ObjectSerializerContext & { snapshot: TODO, rollback: TODO, logger: Logger, profile: boolean | undefined }} context context
*/
serialize({ write, snapshot, rollback, logger, profile }) {
if (profile) {
write(false);
@ -695,6 +721,9 @@ class PackContentItems {
}
}
/**
* @param {ObjectDeserializerContext & { logger: Logger, profile: boolean | undefined }} context context
*/
deserialize({ read, logger, profile }) {
if (read()) {
this.map = read();
@ -760,17 +789,17 @@ class PackContent {
*/
/**
* @param {Set<string>} items keys
* @param {Set<string>} usedItems used keys
* @param {Items} items keys
* @param {Items} usedItems used keys
* @param {PackContentItems | function(): Promise<PackContentItems>} dataOrFn sync or async content
* @param {Logger=} logger logger for logging
* @param {string=} lazyName name of dataOrFn for logging
*/
constructor(items, usedItems, dataOrFn, logger, lazyName) {
this.items = items;
/** @type {function(): Promise<PackContentItems> | PackContentItems} */
/** @type {(function(): Promise<PackContentItems> | PackContentItems) | undefined} */
this.lazy = typeof dataOrFn === "function" ? dataOrFn : undefined;
/** @type {Map<string, any>} */
/** @type {Map<string, any> | undefined} */
this.content = typeof dataOrFn === "function" ? undefined : dataOrFn.map;
this.outdated = false;
this.used = usedItems;
@ -778,6 +807,10 @@ class PackContent {
this.lazyName = lazyName;
}
/**
* @param {string} identifier identifier
* @returns {string | Promise<string>} result
*/
get(identifier) {
this.used.add(identifier);
if (this.content) {
@ -1049,7 +1082,7 @@ class PackFileCacheStrategy {
? ".pack.gz"
: ".pack";
this.snapshot = snapshot;
/** @type {Set<string>} */
/** @type {BuildDependencies} */
this.buildDependencies = new Set();
/** @type {LazySet<string>} */
this.newBuildDependencies = new LazySet();
@ -1081,9 +1114,9 @@ class PackFileCacheStrategy {
const { logger, profile, cacheLocation, version } = this;
/** @type {Snapshot} */
let buildSnapshot;
/** @type {Set<string>} */
/** @type {BuildDependencies} */
let buildDependencies;
/** @type {Set<string>} */
/** @type {BuildDependencies} */
let newBuildDependencies;
/** @type {Snapshot} */
let resolveBuildDependenciesSnapshot;
@ -1272,7 +1305,7 @@ class PackFileCacheStrategy {
}
/**
* @param {LazySet<string>} dependencies dependencies to store
* @param {LazySet<string> | Iterable<string>} dependencies dependencies to store
*/
storeBuildDependencies(dependencies) {
if (this.readonly) return;

View File

@ -8,7 +8,7 @@
const LazySet = require("../util/LazySet");
const makeSerializable = require("../util/makeSerializable");
/** @typedef {import("enhanced-resolve/lib/Resolver")} Resolver */
/** @typedef {import("enhanced-resolve").Resolver} Resolver */
/** @typedef {import("../CacheFacade").ItemCacheFacade} ItemCacheFacade */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../FileSystemInfo")} FileSystemInfo */
@ -213,7 +213,10 @@ class ResolverCachePlugin {
stage: -100
},
(request, resolveContext, callback) => {
if (request._ResolverCachePluginCacheMiss || !fileSystemInfo) {
if (
/** @type {TODO} */ (request)._ResolverCachePluginCacheMiss ||
!fileSystemInfo
) {
return callback();
}
const withYield = typeof resolveContext.yield === "function";
@ -225,7 +228,9 @@ class ResolverCachePlugin {
const activeRequest = activeRequestsWithYield.get(identifier);
if (activeRequest) {
activeRequest[0].push(callback);
activeRequest[1].push(resolveContext.yield);
activeRequest[1].push(
/** @type {TODO} */ (resolveContext.yield)
);
return;
}
} else {
@ -303,19 +308,22 @@ class ResolverCachePlugin {
cachedResolves++;
if (resolveContext.missingDependencies) {
addAllToSet(
resolveContext.missingDependencies,
/** @type {LazySet<string>} */
(resolveContext.missingDependencies),
snapshot.getMissingIterable()
);
}
if (resolveContext.fileDependencies) {
addAllToSet(
resolveContext.fileDependencies,
/** @type {LazySet<string>} */
(resolveContext.fileDependencies),
snapshot.getFileIterable()
);
}
if (resolveContext.contextDependencies) {
addAllToSet(
resolveContext.contextDependencies,
/** @type {LazySet<string>} */
(resolveContext.contextDependencies),
snapshot.getContextIterable()
);
}

View File

@ -47,7 +47,7 @@ const mapObjects = new WeakMap();
/**
* @param {HashableObject} obj object with updateHash method
* @param {string | HashConstructor} hashFunction the hash function to use
* @param {(string | HashConstructor)=} hashFunction the hash function to use
* @returns {LazyHashedEtag} etag
*/
const getter = (obj, hashFunction = "md4") => {

View File

@ -9,11 +9,12 @@ const Dependency = require("../Dependency");
const makeSerializable = require("../util/makeSerializable");
/** @typedef {import("./ContainerEntryModule").ExposeOptions} ExposeOptions */
/** @typedef {import("./ContainerEntryModule").ExposesList} ExposesList */
class ContainerEntryDependency extends Dependency {
/**
* @param {string} name entry name
* @param {[string, ExposeOptions][]} exposes list of exposed modules
* @param {ExposesList} exposes list of exposed modules
* @param {string} shareScope name of the share scope
*/
constructor(name, exposes, shareScope) {

View File

@ -39,12 +39,14 @@ const ContainerExposedDependency = require("./ContainerExposedDependency");
* @property {string} name custom chunk name for the exposed module
*/
/** @typedef {[string, ExposeOptions][]} ExposesList */
const SOURCE_TYPES = new Set(["javascript"]);
class ContainerEntryModule extends Module {
/**
* @param {string} name container entry name
* @param {[string, ExposeOptions][]} exposes list of exposed modules
* @param {ExposesList} exposes list of exposed modules
* @param {string} shareScope name of the share scope
*/
constructor(name, exposes, shareScope) {

View File

@ -12,6 +12,8 @@ const ContainerExposedDependency = require("./ContainerExposedDependency");
const { parseOptions } = require("./options");
/** @typedef {import("../../declarations/plugins/container/ContainerPlugin").ContainerPluginOptions} ContainerPluginOptions */
/** @typedef {import("./ContainerEntryModule").ExposeOptions} ExposeOptions */
/** @typedef {import("./ContainerEntryModule").ExposesList} ExposesList */
/** @typedef {import("../Compiler")} Compiler */
const validate = createSchemaValidation(
@ -41,16 +43,18 @@ class ContainerPlugin {
},
runtime: options.runtime,
filename: options.filename || undefined,
exposes: parseOptions(
options.exposes,
item => ({
import: Array.isArray(item) ? item : [item],
name: undefined
}),
item => ({
import: Array.isArray(item.import) ? item.import : [item.import],
name: item.name || undefined
})
exposes: /** @type {ExposesList} */ (
parseOptions(
options.exposes,
item => ({
import: Array.isArray(item) ? item : [item],
name: undefined
}),
item => ({
import: Array.isArray(item.import) ? item.import : [item.import],
name: item.name || undefined
})
)
)
};
}

View File

@ -129,7 +129,7 @@ class FallbackModule extends Module {
*/
codeGeneration({ runtimeTemplate, moduleGraph, chunkGraph }) {
const ids = this.dependencies.map(dep =>
chunkGraph.getModuleId(moduleGraph.getModule(dep))
chunkGraph.getModuleId(/** @type {Module} */ (moduleGraph.getModule(dep)))
);
const code = Template.asString([
`var ids = ${JSON.stringify(ids)};`,

View File

@ -55,7 +55,9 @@ class ImportDependency extends ModuleDependency {
const refs = [];
for (const referencedExport of this.referencedExports) {
if (referencedExport[0] === "default") {
const selfModule = moduleGraph.getParentModule(this);
const selfModule =
/** @type {Module} */
(moduleGraph.getParentModule(this));
const importedModule =
/** @type {Module} */
(moduleGraph.getModule(this));

View File

@ -13,6 +13,7 @@ const NoAsyncChunksWarning = require("./NoAsyncChunksWarning");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../../declarations/WebpackOptions").PerformanceOptions} PerformanceOptions */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compilation").Asset} Asset */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Entrypoint")} Entrypoint */
/** @typedef {import("../WebpackError")} WebpackError */
@ -32,6 +33,12 @@ const NoAsyncChunksWarning = require("./NoAsyncChunksWarning");
const isOverSizeLimitSet = new WeakSet();
/**
* @param {Asset["name"]} name the name
* @param {Asset["source"]} source the source
* @param {Asset["info"]} info the info
* @returns {boolean} result
*/
const excludeSourceMap = (name, source, info) => !info.development;
module.exports = class SizeLimitsPlugin {
@ -104,6 +111,10 @@ module.exports = class SizeLimitsPlugin {
}
}
/**
* @param {Asset["name"]} name the name
* @returns {boolean | undefined} result
*/
const fileFilter = name => {
const asset = compilation.getAsset(name);
return asset && assetFilter(asset.name, asset.source, asset.info);

View File

@ -576,6 +576,7 @@ class FileMiddleware extends SerializerMiddleware {
/** @type {number | undefined} */
let currentBufferUsed;
const buf = [];
/** @type {import("zlib").Zlib & import("stream").Transform | undefined} */
let decompression;
if (file.endsWith(".gz")) {
decompression = createGunzip({

View File

@ -46,6 +46,8 @@ Technically any value can be used.
/**
* @typedef {Object} ObjectSerializerContext
* @property {function(any): void} write
* @property {(function(any): void)=} writeLazy
* @property {(function(any, object=): (() => Promise<any> | any))=} writeSeparate
* @property {function(any): void} setCircularReference
*/

View File

@ -4,13 +4,27 @@
"use strict";
/**
* @template T, K
* @typedef {import("./SerializerMiddleware")<T, K>} SerializerMiddleware
*/
class Serializer {
/**
* @param {SerializerMiddleware<any, any>[]} middlewares serializer middlewares
* @param {TODO=} context context
*/
constructor(middlewares, context) {
this.serializeMiddlewares = middlewares.slice();
this.deserializeMiddlewares = middlewares.slice().reverse();
this.context = context;
}
/**
* @param {any} obj object
* @param {TODO} context content
* @returns {Promise<any>} result
*/
serialize(obj, context) {
const ctx = { ...context, ...this.context };
let current = obj;
@ -28,6 +42,11 @@ class Serializer {
return current;
}
/**
* @param {any} value value
* @param {TODO} context context
* @returns {Promise<any>} result
*/
deserialize(value, context) {
const ctx = { ...context, ...this.context };
/** @type {any} */

View File

@ -9,6 +9,9 @@ const Hash = require("../Hash");
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
class BatchedHash extends Hash {
/**
* @param {Hash} hash hash
*/
constructor(hash) {
super();
this.string = undefined;

View File

@ -77,7 +77,7 @@ class AsyncWebAssemblyJavascriptGenerator extends Generator {
const wasmDepsByRequest = new Map();
for (const dep of module.dependencies) {
if (dep instanceof WebAssemblyImportDependency) {
const module = moduleGraph.getModule(dep);
const module = /** @type {Module} */ (moduleGraph.getModule(dep));
if (!depModules.has(module)) {
depModules.set(module, {
request: dep.request,
@ -120,7 +120,9 @@ class AsyncWebAssemblyJavascriptGenerator extends Generator {
wasmDepsByRequest,
([request, deps]) => {
const exportItems = deps.map(dep => {
const importedModule = moduleGraph.getModule(dep);
const importedModule =
/** @type {Module} */
(moduleGraph.getModule(dep));
const importVar =
/** @type {ImportObjRequestItem} */
(depModules.get(importedModule)).importVar;

View File

@ -61,6 +61,7 @@ const generateImportObject = (
runtime
) => {
const moduleGraph = chunkGraph.moduleGraph;
/** @type {Map<string, string | number>} */
const waitForInstances = new Map();
const properties = [];
const usedWasmDependencies = WebAssemblyUtils.getUsedDependencies(
@ -85,7 +86,10 @@ const generateImportObject = (
if (direct) {
const instanceVar = `m${waitForInstances.size}`;
waitForInstances.set(instanceVar, chunkGraph.getModuleId(importedModule));
waitForInstances.set(
instanceVar,
chunkGraph.getModuleId(/** @type {Module} */ (importedModule))
);
properties.push({
module,
name,
@ -99,20 +103,24 @@ const generateImportObject = (
);
const mod = `${RuntimeGlobals.moduleCache}[${JSON.stringify(
chunkGraph.getModuleId(importedModule)
chunkGraph.getModuleId(/** @type {Module} */ (importedModule))
)}]`;
const modExports = `${mod}.exports`;
const cache = `wasmImportedFuncCache${declarations.length}`;
declarations.push(`var ${cache};`);
const modCode =
/** @type {Module} */
(importedModule).type.startsWith("webassembly")
? `${mod} ? ${modExports}[${JSON.stringify(usedName)}] : `
: "";
properties.push({
module,
name,
value: Template.asString([
(importedModule.type.startsWith("webassembly")
? `${mod} ? ${modExports}[${JSON.stringify(usedName)}] : `
: "") + `function(${params}) {`,
modCode + `function(${params}) {`,
Template.indent([
`if(${cache} === undefined) ${cache} = ${modExports};`,
`return ${cache}[${JSON.stringify(usedName)}](${params});`

View File

@ -40,7 +40,7 @@ const WebAssemblyExportImportedDependency = require("../dependencies/WebAssembly
/**
* @template T
* @param {Function[]} fns transforms
* @param {((prev: ArrayBuffer) => ArrayBuffer)[]} fns transforms
* @returns {Function} composed transform
*/
const compose = (...fns) => {
@ -55,7 +55,8 @@ const compose = (...fns) => {
/**
* Removes the start instruction
*
* @param {Object} state unused state
* @param {Object} state state
* @param {Object} state.ast Module's ast
* @returns {ArrayBufferTransform} transform
*/
const removeStartFunc = state => bin => {
@ -177,7 +178,9 @@ const createDefaultInitForGlobal = globalType => {
*
* Note that globals will become mutable.
*
* @param {Object} state unused state
* @param {Object} state transformation state
* @param {Object} state.ast Module's ast
* @param {t.Instruction[]} state.additionalInitCode list of addition instructions for the init function
* @returns {ArrayBufferTransform} transform
*/
const rewriteImportedGlobals = state => bin => {
@ -188,7 +191,7 @@ const rewriteImportedGlobals = state => bin => {
bin = editWithAST(state.ast, bin, {
ModuleImport(path) {
if (t.isGlobalType(path.node.descr)) {
const globalType = path.node.descr;
const globalType = /** @type {TODO} */ (path.node.descr);
globalType.mutability = "var";

View File

@ -97,8 +97,9 @@ class WebAssemblyParser extends Parser {
// extract imports and exports
/** @type {string[]} */
const exports = [];
let jsIncompatibleExports = (state.module.buildMeta.jsIncompatibleExports =
undefined);
const buildMeta = /** @type {BuildMeta} */ (state.module.buildMeta);
/** @type {Record<string, string> | undefined} */
let jsIncompatibleExports = (buildMeta.jsIncompatibleExports = undefined);
/** @type {TODO[]} */
const importedGlobals = [];
@ -118,7 +119,8 @@ class WebAssemblyParser extends Parser {
if (incompatibleType) {
if (jsIncompatibleExports === undefined) {
jsIncompatibleExports =
state.module.buildMeta.jsIncompatibleExports = {};
/** @type {BuildMeta} */
(state.module.buildMeta).jsIncompatibleExports = {};
}
jsIncompatibleExports[node.name] = incompatibleType;
}
@ -127,7 +129,8 @@ class WebAssemblyParser extends Parser {
exports.push(node.name);
if (node.descr && node.descr.exportType === "Global") {
const refNode = importedGlobals[node.descr.id.value];
const refNode =
importedGlobals[/** @type {TODO} */ (node.descr.id.value)];
if (refNode) {
const dep = new WebAssemblyExportImportedDependency(
node.name,

View File

@ -3,7 +3,7 @@
"BannerFunction": {
"description": "The banner as function, it will be wrapped in a comment.",
"instanceof": "Function",
"tsType": "(data: { hash: string, chunk: import('../../lib/Chunk'), filename: string }) => string"
"tsType": "(data: { hash?: string, chunk: import('../../lib/Chunk'), filename: string }) => string"
},
"Rule": {
"description": "Filtering rule as regex or string.",

6
types.d.ts vendored
View File

@ -449,7 +449,7 @@ declare interface BackendApi {
declare class BannerPlugin {
constructor(options: BannerPluginArgument);
options: BannerPluginOptions;
banner: (data: { hash: string; chunk: Chunk; filename: string }) => string;
banner: (data: { hash?: string; chunk: Chunk; filename: string }) => string;
/**
* Apply the plugin
@ -459,14 +459,14 @@ declare class BannerPlugin {
type BannerPluginArgument =
| string
| BannerPluginOptions
| ((data: { hash: string; chunk: Chunk; filename: string }) => string);
| ((data: { hash?: string; chunk: Chunk; filename: string }) => string);
declare interface BannerPluginOptions {
/**
* Specifies the banner.
*/
banner:
| string
| ((data: { hash: string; chunk: Chunk; filename: string }) => string);
| ((data: { hash?: string; chunk: Chunk; filename: string }) => string);
/**
* If true, the banner will only be added to the entry chunks.