Removed obsolete loader-execution code; moved loader handling to enhanced-require

This commit is contained in:
Tobias Koppers 2012-08-23 14:58:10 +02:00
parent 013a33e604
commit 2bf6851c80
8 changed files with 205 additions and 484 deletions

View File

@ -238,7 +238,7 @@ var b = require("./b"); // and files
// like in node.js
// polyfill require method to use the new members in node.js too
require = require("webpack/require-polyfill")(require.valueOf());
require = require("enhanced-require")(__filename, require.valueOf());
// create a lazy loaded bundle
require.ensure([], function(require) {

View File

@ -31,7 +31,6 @@ var argv = require("optimist")
.describe("options", "Options JSON File")
.string("public-prefix")
.alias("public-prefix", "script-src-prefix")
.describe("public-prefix", "Path Prefix For JavaScript Loading")
.string("libary")

View File

@ -4,7 +4,7 @@
*/
var parse = require("./parse");
var resolve = require("enhanced-resolve");
var execLoaders = require("./execLoaders");
var execLoaders = require("enhanced-require/lib/execLoaders");
var fs = require("fs");
var path = require("path");
var assert = require("assert");

View File

@ -1,188 +0,0 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
var resolve = require("enhanced-resolve");
var fs = require("fs");
var path = require("path");
/**
* execLoaders
* @param context {string} the context from which this request is coming
* @param request {string} the compile request string
* @param loaders {string[]} the absolute filenames of the loaders
* @param filenames {string[]} the filenames of "contents"
* @param contents {Buffer[]} read contents
* @param cacheEntry {CacheEntry} the cache entry to add dependencies
* @param options {object} the options of the module system
* @param callback {function} (err, arrayOfResultBuffers, allowCaching)
*/
module.exports = function(context, request, loaders, filenames, contents, cacheEntry, options, callback) {
var loaderFunctions, cacheable = true;
if(loaders.length === 0) {
// if no loaders are used, the file content is the resulting code
callback(null, contents, true);
} else {
// try to load all loaders
loaderFunctions = [];
try {
loaders.forEach(function(name) {
var loaderFilename = require.resolve(name);
options.events.emit("loader", loaderFilename);
var loader = require(loaderFilename);
if(loader.separable) {
// require loader in fresh context
var oldCache = {};
for(var entry in require.cache) {
oldCache[entry] = require.cache[entry];
delete require.cache[entry];
}
loader = require(loaderFilename);
for(var entry in oldCache) {
require.cache[entry] = oldCache[entry];
}
} else {
options.events.emit("static-dependency", loaderFilename);
}
loaderFunctions.push(loader);
});
} catch(e) {
callback(e);
return;
}
// iterate over the loaders, asynchron
contents.unshift(null);
nextLoader.apply(null, contents);
}
function nextLoader(/* err, paramBuffer1, paramBuffer2, ...*/) {
var args = Array.prototype.slice.apply(arguments);
var err = args.shift();
if(err) {
// a loader emitted an error
callback(err);
return;
}
// if loaders are remaining
if(loaderFunctions.length > 0) {
var loaderCacheable = false;
var async = false;
var done = false;
try {
// prepare the loader "this" context
// see "Loader Specification" in wiki
var loaderContext = {
context: context,
request: request,
filenames: filenames,
exec: function(code, filename) {
var Module = require("module");
var m = new Module("exec in " + request, module);
m.filename = filenames[0];
m.paths = Module._nodeModulePaths(path.dirname(filenames[0]));
m._compile(code, filename);
return m.exports;
},
resolve: function(context, path, cb) {
resolve(context, "!"+path, options.resolve, cb);
},
cacheable: function(value) {
if(value === undefined) value = true;
loaderCacheable = value;
},
dependency: function(filename) {
options.events.emit("dependency", filename);
if(cacheEntry)
cacheEntry.add(filename);
},
clearDependencies: function(filename) {
if(cacheEntry)
cacheEntry.clear();
},
async: function() {
async = true;
return loaderContext.callback;
},
callback: function(err) {
async = true;
if(done) {
// loader is already "done", so we cannot use the callback function
// for better debugging we print the error on the console
if(err && err.stack) console.error(err.stack);
else if(err) console.error(err);
else console.error(new Error("loader returned multiple times").stack);
return;
}
done = true;
contents = [err];
for(var i = 1; i < arguments.length; i++) {
var arg = arguments[i];
if(arg instanceof Buffer)
contents.push(arg);
else if(typeof arg === "string")
contents.push(new Buffer(arg, "utf-8"));
else
contents.push(arg);
}
loaderFinished.apply(null, arguments);
},
web: true,
debug: options.debug,
minimize: options.minimize,
values: undefined,
options: options,
buffers: args
};
// add additional loader context params or functions
if(options.loader) for(var key in options.loader)
loaderContext[key] = options.loader[key];
// convert all parameters to strings if they are Buffers
var params = [];
args.forEach(function(arg) {
if(arg instanceof Buffer)
params.push(arg.toString("utf-8"));
else
params.push(arg);
});
// exec to loader
var retVal = loaderFunctions.pop().apply(loaderContext, params);
// if it isn't asynchron, use the return value
if(!async) {
done = true;
if(retVal instanceof Buffer)
retVal = retVal;
else if(typeof retVal === "string")
retVal = new Buffer(retVal, "utf-8");
loaderFinished(retVal === undefined ? new Error("loader did not return a value") : null, retVal);
}
function loaderFinished() {
if(!loaderCacheable)
cacheable = false;
nextLoader.apply(null, arguments);
}
} catch(e) {
// ups. loader throwed an exeception
if(!done) {
done = true;
callback(new Error("Loader throwed exeception: " + (typeof e === "object" && e.stack ? e.stack : e)));
} else {
// loader is already "done", so we cannot use the callback function
// for better debugging we print the error on the console
if(typeof e === "object" && e.stack) console.error(e.stack);
else console.error(e);
}
return;
}
} else {
callback(null, args, cacheable);
}
}
}

View File

@ -13,9 +13,7 @@ var HASH_REGEXP = /\[hash\]/i;
/*
webpack(context, moduleName, options, callback);
webpack(context, moduleName, callback);
webpack(absoluteModulePath, options, callback);
webpack(absoluteModulePath, callback);
callback: function(err, source / stats)
source if options.output is not set
@ -67,25 +65,11 @@ module.exports = function(context, moduleName, options, callback) {
moduleName = context.join("!");
context = path.dirname(file);
}
if(typeof moduleName === "function") {
callback = moduleName;
options = {};
context = context.split("!");
var file = context.pop();
context.push("./" + path.basename(file));
moduleName = context.join("!");
context = path.dirname(file);
}
if(!callback) {
callback = options;
options = {};
}
// Defaults
if(!options.outputJsonpFunction)
options.outputJsonpFunction = "webpackJsonp" + (options.libary || "");
options.scriptSrcPrefix = options.scriptSrcPrefix || ""; // DEPRECATED
options.publicPrefix = options.publicPrefix || options.scriptSrcPrefix
options.publicPrefix = options.publicPrefix || "";
options.context = options.context || context;
@ -101,7 +85,9 @@ module.exports = function(context, moduleName, options, callback) {
if(!options.outputPostfix) {
options.outputPostfix = "." + options.output;
}
} // else DEPRECATED
} else {
return callback(new Error("options.output is required"));
}
options.parse = options.parse || {};
options.parse.overwrites = options.parse.overwrites || {};
@ -287,243 +273,222 @@ function webpack(context, moduleName, options, callback) {
return;
}
var buffer = [];
if(options.output) { // if options.output set, we write to files
// collect which module is in which file
var fileModulesMap = {};
// collect which module is in which file
var fileModulesMap = {};
// collect which chunks exists
var chunksCount = 0;
// collect which chunks exists
var chunksCount = 0;
// all ids of the chunks, in desc order
var chunkIds = Object.keys(depTree.chunks);
chunkIds.sort(function(a,b) {
if(typeof depTree.chunks[b].realId !== "number") return 1;
if(typeof depTree.chunks[a].realId !== "number") return -1;
return depTree.chunks[b].realId - depTree.chunks[a].realId;
});
// all ids of the chunks, in desc order
var chunkIds = Object.keys(depTree.chunks);
chunkIds.sort(function(a,b) {
if(typeof depTree.chunks[b].realId !== "number") return 1;
if(typeof depTree.chunks[a].realId !== "number") return -1;
return depTree.chunks[b].realId - depTree.chunks[a].realId;
});
// the template used
var template = getTemplate(options, {chunks: chunkIds.length > 1});
// the template used
var template = getTemplate(options, {chunks: chunkIds.length > 1});
// hash as crypto.Hash instance
// for compution
var hash;
try {
hash = new (require("crypto").Hash)("md5");
hash.update(JSON.stringify(options.libary || ""));
hash.update(JSON.stringify(options.outputPostfix));
hash.update(JSON.stringify(options.outputJsonpFunction));
hash.update(JSON.stringify(options.publicPrefix));
hash.update(template);
hash.update("1");
} catch(e) {
// if this didn't work
// we do not use a hash
hash = null;
}
// hash as crypto.Hash instance
// for compution
var hash;
try {
hash = new (require("crypto").Hash)("md5");
hash.update(JSON.stringify(options.libary || ""));
hash.update(JSON.stringify(options.outputPostfix));
hash.update(JSON.stringify(options.outputJsonpFunction));
hash.update(JSON.stringify(options.publicPrefix));
hash.update(template);
hash.update("1");
} catch(e) {
// if this didn't work
// we do not use a hash
hash = null;
}
// for each chunk
for(var i = 0; i < chunkIds.length; i++) {
var chunkId = chunkIds[i];
var chunk = depTree.chunks[chunkId];
// for each chunk
for(var i = 0; i < chunkIds.length; i++) {
var chunkId = chunkIds[i];
var chunk = depTree.chunks[chunkId];
// check is chunk is empty or a duplicate
if(chunk.empty) continue;
if(chunk.equals !== undefined) continue;
chunksCount++;
// check is chunk is empty or a duplicate
if(chunk.empty) continue;
if(chunk.equals !== undefined) continue;
chunksCount++;
// build filename
var filename = chunk.filename = chunk.realId === 0 ? options.output : chunk.realId + options.outputPostfix;
// build filename
var filename = chunk.filename = chunk.realId === 0 ? options.output : chunk.realId + options.outputPostfix;
// get content of chunk
var content = writeChunk(depTree, chunk, options);
if(hash) hash.update(content);
buffer = [];
if(chunk.realId === 0) { // initial chunk
if(hash)
hash = hash.digest("hex");
else
hash = "";
// get content of chunk
var content = writeChunk(depTree, chunk, options);
if(hash) hash.update(content);
buffer = [];
if(chunk.realId === 0) { // initial chunk
if(hash)
hash = hash.digest("hex");
else
hash = "";
// if it should be a libary, we prepend a variable name
if(options.libary) {
buffer.push("/******/var ");
buffer.push(options.libary);
buffer.push("=\n");
}
// write the template
buffer.push(template);
// write extra info into object
buffer.push("/******/({");
if(chunkIds.length > 1) {
buffer.push("a:");
buffer.push(JSON.stringify(options.outputPostfix.replace(HASH_REGEXP, hash)));
buffer.push(",b:");
buffer.push(JSON.stringify(options.outputJsonpFunction));
buffer.push(",");
}
buffer.push("c:");
buffer.push(JSON.stringify(options.publicPrefix.replace(HASH_REGEXP, hash)));
buffer.push(",\n");
} else { // lazy loaded chunk
// write only jsonp function and chunk id as function call
buffer.push("/******/");
buffer.push(options.outputJsonpFunction);
buffer.push("(");
buffer.push(chunk.realId);
buffer.push(", {\n");
}
// write content of chunk
buffer.push(content);
// and close object
buffer.push("/******/})");
// convert buffer to string
buffer = buffer.join("");
// minimize if wished
try {
if(options.minimize) buffer = uglify(buffer, path.join(options.outputDirectory, filename));
} catch(e) {
return callback(e);
// if it should be a libary, we prepend a variable name
if(options.libary) {
buffer.push("/******/var ");
buffer.push(options.libary);
buffer.push("=\n");
}
// push it as "file write"
options.emitFile(filename, buffer, true);
}
options.events.emit("task-end", "prepare chunks");
// write the template
buffer.push(template);
if(options.noWrite) return writingFinished();
options.events.emit("start-writing", hash);
// recursive create dir
function createDir(dir, callback) {
fileExists(dir, function(exists) {
if(exists)
callback();
else {
fs.mkdir(dir, function(err) {
if(err) {
var parentDir = path.join(dir, "..");
if(parentDir == dir)
return callback(err);
createDir(parentDir, function(err) {
if(err) return callback(err);
fs.mkdir(dir, function(err) {
if(err) return callback(err);
callback();
});
});
return;
}
callback();
});
}
});
}
// create output directory
var outDir = options.outputDirectory.replace(HASH_REGEXP, hash);
createDir(outDir, function(err) {
options.events.emit("task-end", "create ouput directory");
if(err) return callback(err);
writeFiles();
});
// collect file sizes
var fileSizeMap = {};
// do the writing of all generated files
function writeFiles() {
var remFiles = fileWrites.length;
fileWrites.forEach(function(writeAction) {
var writeActionFileName = writeAction[0].replace(HASH_REGEXP, hash)
options.events.emit("task", "write " + writeActionFileName);
fileSizeMap[path.basename(writeActionFileName)] = writeAction[1].length;
fs.writeFile(writeActionFileName, writeAction[1], "utf-8", function(err) {
options.events.emit("task-end", "write " + writeActionFileName);
if(err) throw err;
remFiles--;
if(remFiles === 0)
writingFinished();
});
});
if(fileWrites.length == 0) writingFinished();
}
// after writing: generate statistics
function writingFinished() {
// Stats
buffer = {};
buffer.hash = hash;
buffer.chunkCount = chunksCount;
buffer.modulesCount = Object.keys(depTree.modules).length;
var sum = 0;
chunkIds.reverse().forEach(function(chunkId) {
var chunk = depTree.chunks[chunkId]
if(!chunk.filename) return;
var modulesArray = [];
for(var moduleId in chunk.modules) {
if(chunk.modules[moduleId] === "include") {
var modu = depTree.modules[moduleId];
modulesArray.push({
id: modu.realId,
size: modu.size,
filename: modu.filename,
dirname: modu.dirname,
fromCache: modu.fromCache,
toCache: modu.toCache,
reasons: modu.reasons});
sum++;
}
}
modulesArray.sort(function(a, b) {
return a.id - b.id;
});
fileModulesMap[path.basename(chunk.filename)] = modulesArray;
});
buffer.modulesIncludingDuplicates = sum;
buffer.modulesPerChunk = Math.round(sum / chunksCount*10)/10; // DEPRECATED: useless info
sum = 0;
for(var moduleId in depTree.chunks.main.modules) {
if(depTree.chunks.main.modules[moduleId] === "include")
sum++;
// write extra info into object
buffer.push("/******/({");
if(chunkIds.length > 1) {
buffer.push("a:");
buffer.push(JSON.stringify(options.outputPostfix.replace(HASH_REGEXP, hash)));
buffer.push(",b:");
buffer.push(JSON.stringify(options.outputJsonpFunction));
buffer.push(",");
}
buffer.modulesFirstChunk = sum;
buffer.fileSizes = fileSizeMap;
buffer.warnings = depTree.warnings;
buffer.errors = depTree.errors;
buffer.fileModules = fileModulesMap;
buffer.subStats = subStats;
buffer.time = new Date() - startTime;
options.events.emit("task-end", "statistics");
options.events.emit("bundle", buffer);
callback(null, buffer);
buffer.push("c:");
buffer.push(JSON.stringify(options.publicPrefix.replace(HASH_REGEXP, hash)));
buffer.push(",\n");
} else { // lazy loaded chunk
// write only jsonp function and chunk id as function call
buffer.push("/******/");
buffer.push(options.outputJsonpFunction);
buffer.push("(");
buffer.push(chunk.realId);
buffer.push(", {\n");
}
} else { // if options.output not set, we write to stdout
// THIS IS DEPRECATED
if(options.libary) {
buffer.push("/******/var ");
buffer.push(options.libary);
buffer.push("=\n");
}
buffer.push(getTemplate(options, {chunks: false}));
buffer.push("/******/({\n");
buffer.push(writeChunk(depTree, options));
// write content of chunk
buffer.push(content);
// and close object
buffer.push("/******/})");
// convert buffer to string
buffer = buffer.join("");
// minimize if wished
try {
if(options.minimize) buffer = uglify(buffer, "output");
options.events.emit("task-end", "statistics");
callback(null, buffer);
if(options.minimize) buffer = uglify(buffer, path.join(options.outputDirectory, filename));
} catch(e) {
callback(e);
return callback(e);
}
// push it as "file write"
options.emitFile(filename, buffer, true);
}
options.events.emit("task-end", "prepare chunks");
if(options.noWrite) return writingFinished();
options.events.emit("start-writing", hash);
// recursive create dir
function createDir(dir, callback) {
fileExists(dir, function(exists) {
if(exists)
callback();
else {
fs.mkdir(dir, function(err) {
if(err) {
var parentDir = path.join(dir, "..");
if(parentDir == dir)
return callback(err);
createDir(parentDir, function(err) {
if(err) return callback(err);
fs.mkdir(dir, function(err) {
if(err) return callback(err);
callback();
});
});
return;
}
callback();
});
}
});
}
// create output directory
var outDir = options.outputDirectory.replace(HASH_REGEXP, hash);
createDir(outDir, function(err) {
options.events.emit("task-end", "create ouput directory");
if(err) return callback(err);
writeFiles();
});
// collect file sizes
var fileSizeMap = {};
// do the writing of all generated files
function writeFiles() {
var remFiles = fileWrites.length;
fileWrites.forEach(function(writeAction) {
var writeActionFileName = writeAction[0].replace(HASH_REGEXP, hash)
options.events.emit("task", "write " + writeActionFileName);
fileSizeMap[path.basename(writeActionFileName)] = writeAction[1].length;
fs.writeFile(writeActionFileName, writeAction[1], "utf-8", function(err) {
options.events.emit("task-end", "write " + writeActionFileName);
if(err) throw err;
remFiles--;
if(remFiles === 0)
writingFinished();
});
});
if(fileWrites.length == 0) writingFinished();
}
// after writing: generate statistics
function writingFinished() {
// Stats
buffer = {};
buffer.hash = hash;
buffer.chunkCount = chunksCount;
buffer.modulesCount = Object.keys(depTree.modules).length;
var sum = 0;
chunkIds.reverse().forEach(function(chunkId) {
var chunk = depTree.chunks[chunkId]
if(!chunk.filename) return;
var modulesArray = [];
for(var moduleId in chunk.modules) {
if(chunk.modules[moduleId] === "include") {
var modu = depTree.modules[moduleId];
modulesArray.push({
id: modu.realId,
size: modu.size,
filename: modu.filename,
dirname: modu.dirname,
fromCache: modu.fromCache,
toCache: modu.toCache,
reasons: modu.reasons});
sum++;
}
}
modulesArray.sort(function(a, b) {
return a.id - b.id;
});
fileModulesMap[path.basename(chunk.filename)] = modulesArray;
});
buffer.modulesIncludingDuplicates = sum;
buffer.modulesPerChunk = Math.round(sum / chunksCount*10)/10; // DEPRECATED: useless info
sum = 0;
for(var moduleId in depTree.chunks.main.modules) {
if(depTree.chunks.main.modules[moduleId] === "include")
sum++;
}
buffer.modulesFirstChunk = sum;
buffer.fileSizes = fileSizeMap;
buffer.warnings = depTree.warnings;
buffer.errors = depTree.errors;
buffer.fileModules = fileModulesMap;
buffer.subStats = subStats;
buffer.time = new Date() - startTime;
options.events.emit("task-end", "statistics");
options.events.emit("bundle", buffer);
callback(null, buffer);
}
});
return options.events;

View File

@ -8,14 +8,14 @@
"optimist": "0.2.x",
"uglify-js": "1.2.x",
"sprintf": "0.1.x",
"enhanced-require": "0.1.x",
"enhanced-require": "0.2.x",
"enhanced-resolve": "0.2.x",
"raw-loader": "0.1.x",
"json-loader": "0.1.x",
"jade-loader": "0.1.x",
"coffee-loader": "0.1.x",
"css-loader": "0.2.x",
"less-loader": "0.1.x",
"less-loader": "0.2.x",
"style-loader": "0.1.x",
"script-loader": "0.1.x",
"bundle-loader": "0.1.x",

View File

@ -1 +0,0 @@
// DEPRECATED shim: webpack/require-polyfill now just re-exports enhanced-require.
// Added the statement-terminating semicolon (do not rely on ASI).
module.exports = require("enhanced-require");

View File

@ -4,63 +4,9 @@
*/
var should = require("should");
var path = require("path");
require = require("../require-polyfill")(require.valueOf());
require = require("enhanced-require")(__filename);
describe("polyfill", function() {
describe("require.context", function() {
var context = require.context("./fixtures")
it("should be able to require a file without extension", function() {
var a = context("./a");
should.exist(a);
a.should.be.a("function");
a().should.be.equal("This is a");
});
it("should be able to require a file with extension", function() {
var a = context("./a.js");
should.exist(a);
a.should.be.a("function");
a().should.be.equal("This is a");
});
it("should be able to require a file in a subdirectory", function() {
var complex1 = context("./lib/complex1");
should.exist(complex1);
complex1.should.be.equal("lib complex1");
});
it("should throw an exception if the module does not exists", function() {
(function() {
context("./notExists.js");
}).should.throw(/Cannot find module/);
});
});
describe("require.ensure", function() {
it("should be executed synchron", function() {
var executed = false;
var oldRequire = require;
require.ensure([], function(require) {
executed = true;
should.exist(require);
require.should.be.a("function");
require.should.be.equal(oldRequire);
});
executed.should.be.ok;
});
it("should work with modules list", function() {
require.ensure(["./fixtures/a"], function(require) {
var a = require("./fixtures/a");
should.exist(a);
a.should.be.a("function");
a().should.be.equal("This is a");
});
});
});
describe("loader", function() {
describe("raw", function() {
it("should load abc", function() {